From fdd7c30ea38a4bd1f0bea4a9c9cf0b264b7732b9 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Mon, 6 Apr 2026 15:38:56 +0000 Subject: [PATCH] feat: Updated OpenAPI spec --- ...ionClient.ImageImageAiDetectionCreate.g.cs | 22 +- ...ctionClient.TextTextAiDetectionCreate.g.cs | 22 +- .../Generated/EdenAI.AiDetectionClient.g.cs | 2 +- ...Client.OcrOcrAnonymizationAsyncCreate.g.cs | 4 +- ...ient.OcrOcrAnonymizationAsyncRetrieve.g.cs | 4 +- ...ent.OcrOcrAnonymizationAsyncRetrieve2.g.cs | 20 +- .../EdenAI.AnonymizationAsyncClient.g.cs | 2 +- ...nClient.ImageImageAnonymizationCreate.g.cs | 22 +- ...ionClient.TextTextAnonymizationCreate.g.cs | 22 +- .../Generated/EdenAI.AnonymizationClient.g.cs | 2 +- ...TranslationAutomaticTranslationCreate.g.cs | 22 +- .../EdenAI.AutomaticTranslationClient.g.cs | 2 +- ...tomlClassificationCreateProjectCreate.g.cs | 22 +- ...tomlClassificationDeleteProjectCreate.g.cs | 22 +- ...omlClassificationListProjectsRetrieve.g.cs | 20 +- ...utomlClassificationPredictAsyncCreate.g.cs | 4 +- ...omlClassificationPredictAsyncRetrieve.g.cs | 4 +- ...mlClassificationPredictAsyncRetrieve2.g.cs | 20 +- ...eAutomlClassificationTrainAsyncCreate.g.cs | 4 +- ...utomlClassificationTrainAsyncRetrieve.g.cs | 4 +- ...tomlClassificationTrainAsyncRetrieve2.g.cs | 20 +- ...mlClassificationUploadDataAsyncCreate.g.cs | 4 +- ...ClassificationUploadDataAsyncRetrieve.g.cs | 4 +- ...lassificationUploadDataAsyncRetrieve2.g.cs | 20 +- .../EdenAI.AutomlClassificationClient.g.cs | 2 +- ...ent.ImageImageBackgroundRemovalCreate.g.cs | 22 +- .../EdenAI.BackgroundRemovalClient.g.cs | 2 +- ...ngClient.OcrOcrBankCheckParsingCreate.g.cs | 22 +- .../EdenAI.BankCheckParsingClient.g.cs | 2 +- .../EdenAI.BatchClient.BatchBatchList.g.cs | 4 +- ...EdenAI.BatchClient.FeatureBatchCreate.g.cs | 22 +- ...enAI.BatchClient.FeatureBatchRetrieve.g.cs | 20 +- .../EdenAI/Generated/EdenAI.BatchClient.g.cs | 2 +- .../EdenAI.ChatClient.LlmLlmChatCreate.g.cs | 22 +- 
...Client.MultimodalMultimodalChatCreate.g.cs | 22 +- .../EdenAI.ChatClient.TextTextChatCreate.g.cs | 22 +- ...I.ChatClient.TextTextChatStreamCreate.g.cs | 2 +- .../EdenAI/Generated/EdenAI.ChatClient.g.cs | 2 +- ...onClient.TextTextCodeGenerationCreate.g.cs | 22 +- .../EdenAI.CodeGenerationClient.g.cs | 2 +- ....CostManagementCostManagementRetrieve.g.cs | 20 +- ...ManagementDisplayNameOfYourSubfeature.g.cs | 4 +- .../EdenAI.CostMonitoringClient.g.cs | 2 +- ...iproductsAskyodaV2ConversationsCreate.g.cs | 6 +- ...t.AiproductsAiproductsAskyodaV2Create.g.cs | 22 +- ...I.CustomChatbotCreateOperationsClient.g.cs | 2 +- ...oductsAskyodaV2DeleteAllChunksDestroy.g.cs | 20 +- ...AiproductsAskyodaV2DeleteChunkDestroy.g.cs | 20 +- ...I.CustomChatbotDeleteOperationsClient.g.cs | 2 +- ...sAiproductsAskyodaV2ConversationsList.g.cs | 4 +- ...roductsAskyodaV2ConversationsRetrieve.g.cs | 4 +- ...iproductsAiproductsAskyodaV2FilesList.g.cs | 4 +- ...ductsAiproductsAskyodaV2FilesRetrieve.g.cs | 4 +- ...oductsAiproductsAskyodaV2InfoRetrieve.g.cs | 20 +- ...iproductsAskyodaV2ListChunksIdsCreate.g.cs | 22 +- ...ationsClient.AiproductsAiproductsList.g.cs | 4 +- ...nsClient.AiproductsAiproductsRetrieve.g.cs | 4 +- ...nAI.CustomChatbotInfoOperationsClient.g.cs | 2 +- ...oductsAiproductsAskyodaV2AskLlmCreate.g.cs | 22 +- ...uctsAiproductsAskyodaV2GenerateCreate.g.cs | 22 +- ...roductsAiproductsAskyodaV2QueryCreate.g.cs | 22 +- ...AI.CustomChatbotQueryOperationsClient.g.cs | 2 +- ...tsAskyodaV2ConversationsPartialUpdate.g.cs | 6 +- ...iproductsAskyodaV2ConversationsUpdate.g.cs | 6 +- ...tsAskyodaV2UpdateProjectPartialUpdate.g.cs | 6 +- ...I.CustomChatbotUpdateOperationsClient.g.cs | 2 +- ...ductsAiproductsAskyodaV2AddFileCreate.g.cs | 2 +- ...uctsAiproductsAskyodaV2AddImageCreate.g.cs | 2 +- ...ductsAiproductsAskyodaV2AddTextCreate.g.cs | 2 +- ...oductsAiproductsAskyodaV2AddUrlCreate.g.cs | 2 +- ...uctsAiproductsAskyodaV2AddVideoCreate.g.cs | 2 +- ...I.CustomChatbotUploadOperationsClient.g.cs 
| 2 +- ...crOcrCustomDocumentParsingAsyncCreate.g.cs | 4 +- ...OcrCustomDocumentParsingAsyncRetrieve.g.cs | 4 +- ...crCustomDocumentParsingAsyncRetrieve2.g.cs | 20 +- ...enAI.CustomDocumentParsingAsyncClient.g.cs | 2 +- ...tionClient.OcrOcrDataExtractionCreate.g.cs | 22 +- .../EdenAI.DataExtractionClient.g.cs | 2 +- ...ideoVideoDeepfakeDetectionAsyncCreate.g.cs | 4 +- ...eoVideoDeepfakeDetectionAsyncRetrieve.g.cs | 4 +- ...oVideoDeepfakeDetectionAsyncRetrieve2.g.cs | 20 +- .../EdenAI.DeepfakeDetectionAsyncClient.g.cs | 2 +- ...ent.ImageImageDeepfakeDetectionCreate.g.cs | 22 +- .../EdenAI.DeepfakeDetectionClient.g.cs | 2 +- ...nTranslationDocumentTranslationCreate.g.cs | 22 +- .../EdenAI.DocumentTranslationClient.g.cs | 2 +- .../EdenAI/Generated/EdenAI.EdenAIClient.g.cs | 224 +- ...ingsClient.ImageImageEmbeddingsCreate.g.cs | 22 +- ...ddingsClient.TextTextEmbeddingsCreate.g.cs | 22 +- .../Generated/EdenAI.EmbeddingsClient.g.cs | 2 +- ...Client.TextTextEmotionDetectionCreate.g.cs | 22 +- .../EdenAI.EmotionDetectionClient.g.cs | 2 +- .../EdenAI.EnabledFeaturesClient.g.cs | 2 +- ...tClient.TextTextEntitySentimentCreate.g.cs | 22 +- .../EdenAI.EntitySentimentClient.g.cs | 2 +- ...lient.WorkflowWorkflowExecutionCreate.g.cs | 6 +- ...sClient.WorkflowWorkflowExecutionList.g.cs | 4 +- ...ent.WorkflowWorkflowExecutionRetrieve.g.cs | 4 +- .../Generated/EdenAI.ExecutionsClient.g.cs | 2 +- ...lient.ImageImageExplicitContentCreate.g.cs | 22 +- .../EdenAI.ExplicitContentClient.g.cs | 2 +- ...eoExplicitContentDetectionAsyncCreate.g.cs | 4 +- ...ExplicitContentDetectionAsyncRetrieve.g.cs | 4 +- ...xplicitContentDetectionAsyncRetrieve2.g.cs | 20 +- ...I.ExplicitContentDetectionAsyncClient.g.cs | 2 +- ...areClient.ImageImageFaceCompareCreate.g.cs | 22 +- .../Generated/EdenAI.FaceCompareClient.g.cs | 2 +- ...nt.VideoVideoFaceDetectionAsyncCreate.g.cs | 4 +- ....VideoVideoFaceDetectionAsyncRetrieve.g.cs | 4 +- ...VideoVideoFaceDetectionAsyncRetrieve2.g.cs | 20 +- 
.../EdenAI.FaceDetectionAsyncClient.g.cs | 2 +- ...nClient.ImageImageFaceDetectionCreate.g.cs | 22 +- .../Generated/EdenAI.FaceDetectionClient.g.cs | 2 +- ...mageImageFaceRecognitionAddFaceCreate.g.cs | 22 +- ...eImageFaceRecognitionDeleteFaceCreate.g.cs | 22 +- ...ImageFaceRecognitionListFacesRetrieve.g.cs | 20 +- ...geImageFaceRecognitionRecognizeCreate.g.cs | 22 +- .../EdenAI.FaceRecognitionClient.g.cs | 2 +- ...serClient.OcrOcrFinancialParserCreate.g.cs | 22 +- .../EdenAI.FinancialParserClient.g.cs | 2 +- ...lient.VideoVideoGenerationAsyncCreate.g.cs | 4 +- ...ent.VideoVideoGenerationAsyncRetrieve.g.cs | 4 +- ...nt.VideoVideoGenerationAsyncRetrieve2.g.cs | 20 +- .../EdenAI.GenerationAsyncClient.g.cs | 2 +- ...tionClient.ImageImageGenerationCreate.g.cs | 22 +- .../Generated/EdenAI.GenerationClient.g.cs | 2 +- .../Generated/EdenAI.IAiDetectionClient.g.cs | 2 +- .../EdenAI.IAnonymizationAsyncClient.g.cs | 2 +- .../EdenAI.IAnonymizationClient.g.cs | 2 +- .../EdenAI.IAutomaticTranslationClient.g.cs | 2 +- .../EdenAI.IAutomlClassificationClient.g.cs | 2 +- .../EdenAI.IBackgroundRemovalClient.g.cs | 2 +- .../EdenAI.IBankCheckParsingClient.g.cs | 2 +- .../EdenAI/Generated/EdenAI.IBatchClient.g.cs | 2 +- .../EdenAI/Generated/EdenAI.IChatClient.g.cs | 2 +- .../EdenAI.ICodeGenerationClient.g.cs | 2 +- .../EdenAI.ICostMonitoringClient.g.cs | 2 +- ....ICustomChatbotCreateOperationsClient.g.cs | 2 +- ....ICustomChatbotDeleteOperationsClient.g.cs | 2 +- ...AI.ICustomChatbotInfoOperationsClient.g.cs | 2 +- ...I.ICustomChatbotQueryOperationsClient.g.cs | 2 +- ....ICustomChatbotUpdateOperationsClient.g.cs | 2 +- ....ICustomChatbotUploadOperationsClient.g.cs | 2 +- ...nAI.ICustomDocumentParsingAsyncClient.g.cs | 2 +- .../EdenAI.IDataExtractionClient.g.cs | 2 +- .../EdenAI.IDeepfakeDetectionAsyncClient.g.cs | 2 +- .../EdenAI.IDeepfakeDetectionClient.g.cs | 2 +- .../EdenAI.IDocumentTranslationClient.g.cs | 2 +- .../Generated/EdenAI.IEdenAIClient.g.cs | 2 +- 
.../Generated/EdenAI.IEmbeddingsClient.g.cs | 2 +- .../EdenAI.IEmotionDetectionClient.g.cs | 2 +- .../EdenAI.IEnabledFeaturesClient.g.cs | 2 +- .../EdenAI.IEntitySentimentClient.g.cs | 2 +- .../Generated/EdenAI.IExecutionsClient.g.cs | 2 +- .../EdenAI.IExplicitContentClient.g.cs | 2 +- ....IExplicitContentDetectionAsyncClient.g.cs | 2 +- .../Generated/EdenAI.IFaceCompareClient.g.cs | 2 +- .../EdenAI.IFaceDetectionAsyncClient.g.cs | 2 +- .../EdenAI.IFaceDetectionClient.g.cs | 2 +- .../EdenAI.IFaceRecognitionClient.g.cs | 2 +- .../EdenAI.IFinancialParserClient.g.cs | 2 +- .../EdenAI.IGenerationAsyncClient.g.cs | 2 +- .../Generated/EdenAI.IGenerationClient.g.cs | 2 +- .../EdenAI.IIdentityParserClient.g.cs | 2 +- .../EdenAI/Generated/EdenAI.IInfosClient.g.cs | 2 +- .../EdenAI.IKeywordExtractionClient.g.cs | 2 +- .../EdenAI.ILabelDetectionAsyncClient.g.cs | 2 +- .../EdenAI.ILandmarkDetectionClient.g.cs | 2 +- .../EdenAI.ILanguageDetectionClient.g.cs | 2 +- .../EdenAI.ILogoDetectionAsyncClient.g.cs | 2 +- .../EdenAI.ILogoDetectionClient.g.cs | 2 +- .../Generated/EdenAI.IModerationClient.g.cs | 2 +- .../EdenAI.INamedEntityRecognitionClient.g.cs | 2 +- .../EdenAI.IObjectDetectionClient.g.cs | 2 +- .../EdenAI.IObjectTrackingAsyncClient.g.cs | 2 +- .../Generated/EdenAI.IOcrAsyncClient.g.cs | 2 +- .../EdenAI/Generated/EdenAI.IOcrClient.g.cs | 2 +- .../EdenAI.IOcrTablesAsyncClient.g.cs | 2 +- .../EdenAI.IPersonTrackingAsyncClient.g.cs | 2 +- .../EdenAI.IPlagiaDetectionClient.g.cs | 2 +- .../EdenAI.IPromptOptimizationClient.g.cs | 2 +- .../Generated/EdenAI.IPromptsClient.g.cs | 2 +- .../EdenAI.IQuestionAnswerAsyncClient.g.cs | 2 +- .../EdenAI.IQuestionAnswerClient.g.cs | 2 +- .../Generated/EdenAI.IResourcesClient.g.cs | 2 +- .../Generated/EdenAI.IResumeParserClient.g.cs | 2 +- .../Generated/EdenAI.ISearchClient.g.cs | 2 +- .../EdenAI.ISentimentAnalysisClient.g.cs | 2 +- ...denAI.IShotChangeDetectionAsyncClient.g.cs | 2 +- .../EdenAI.ISpeechToTextAsyncClient.g.cs | 2 +- 
.../Generated/EdenAI.ISpellCheckClient.g.cs | 2 +- .../Generated/EdenAI.ISummarizeClient.g.cs | 2 +- .../EdenAI.ITextDetectionAsyncClient.g.cs | 2 +- .../EdenAI.ITextToSpeechAsyncClient.g.cs | 2 +- .../Generated/EdenAI.ITextToSpeechClient.g.cs | 2 +- .../EdenAI.ITopicExtractionClient.g.cs | 2 +- .../EdenAI.IUserManagementClient.g.cs | 2 +- .../Generated/EdenAI.IWebhookClient.g.cs | 2 +- .../Generated/EdenAI.IWorkflowsClient.g.cs | 2 +- ...rserClient.OcrOcrIdentityParserCreate.g.cs | 22 +- .../EdenAI.IdentityParserClient.g.cs | 2 +- ...lient.InfoInfoProviderSubfeaturesList.g.cs | 4 +- .../EdenAI/Generated/EdenAI.InfosClient.g.cs | 2 +- .../EdenAI.JsonConverters.OneOf2.g.cs | 52 +- .../EdenAI.JsonConverters.OneOf3.g.cs | 74 +- .../EdenAI.JsonSerializerContext.g.cs | 814 +++++ .../EdenAI.JsonSerializerContextTypes.g.cs | 2650 +++++++++++++++++ ...lient.TextTextKeywordExtractionCreate.g.cs | 22 +- .../EdenAI.KeywordExtractionClient.g.cs | 2 +- ...t.VideoVideoLabelDetectionAsyncCreate.g.cs | 4 +- ...VideoVideoLabelDetectionAsyncRetrieve.g.cs | 4 +- ...ideoVideoLabelDetectionAsyncRetrieve2.g.cs | 20 +- .../EdenAI.LabelDetectionAsyncClient.g.cs | 2 +- ...ent.ImageImageLandmarkDetectionCreate.g.cs | 22 +- .../EdenAI.LandmarkDetectionClient.g.cs | 2 +- ...ionTranslationLanguageDetectionCreate.g.cs | 22 +- .../EdenAI.LanguageDetectionClient.g.cs | 2 +- ...nt.VideoVideoLogoDetectionAsyncCreate.g.cs | 4 +- ....VideoVideoLogoDetectionAsyncRetrieve.g.cs | 4 +- ...VideoVideoLogoDetectionAsyncRetrieve2.g.cs | 20 +- .../EdenAI.LogoDetectionAsyncClient.g.cs | 2 +- ...nClient.ImageImageLogoDetectionCreate.g.cs | 22 +- .../Generated/EdenAI.LogoDetectionClient.g.cs | 2 +- ...AI.Models.AudiospeechToTextAsyncModel.g.cs | 112 +- ...Models.AudiotextToSpeechResponseModel.g.cs | 54 +- ...s.ImagebackgroundRemovalResponseModel.g.cs | 52 +- ...I.Models.ImageembeddingsResponseModel.g.cs | 24 +- ...els.ImageexplicitContentResponseModel.g.cs | 56 +- ....Models.ImagefaceCompareResponseModel.g.cs | 
24 +- ...odels.ImagefaceDetectionResponseModel.g.cs | 24 +- ...els.ImagefaceRecognitionResponseModel.g.cs | 18 +- ...I.Models.ImagegenerationResponseModel.g.cs | 54 +- ...s.ImagelandmarkDetectionResponseModel.g.cs | 18 +- ...odels.ImagelogoDetectionResponseModel.g.cs | 56 +- ...els.ImageobjectDetectionResponseModel.g.cs | 46 +- ...dels.ImagequestionAnswerResponseModel.g.cs | 24 +- ...denAI.Models.ImagesearchResponseModel.g.cs | 18 +- .../EdenAI.Models.LlmchatResponseModel.g.cs | 242 +- ...AI.Models.MultimodalchatResponseModel.g.cs | 34 +- ...nAI.Models.OcranonymizationAsyncModel.g.cs | 20 +- ...dels.OcrbankCheckParsingResponseModel.g.cs | 24 +- ...Models.OcrdataExtractionResponseModel.g.cs | 18 +- ...odels.OcrfinancialParserResponseModel.g.cs | 96 +- ...Models.OcridentityParserResponseModel.g.cs | 44 +- .../EdenAI.Models.OcrocrAsyncModel.g.cs | 24 +- .../EdenAI.Models.OcrocrResponseModel.g.cs | 66 +- .../EdenAI.Models.OcrocrTablesAsyncModel.g.cs | 18 +- ...I.Models.OcrresumeParserResponseModel.g.cs | 50 +- ...Models.TextanonymizationResponseModel.g.cs | 48 +- .../EdenAI.Models.TextchatResponseModel.g.cs | 118 +- ...AI.Models.TextembeddingsResponseModel.g.cs | 56 +- ...ls.TextkeywordExtractionResponseModel.g.cs | 58 +- ...AI.Models.TextmoderationResponseModel.g.cs | 18 +- ...xtnamedEntityRecognitionResponseModel.g.cs | 44 +- ...ls.TextsentimentAnalysisResponseModel.g.cs | 76 +- ...AI.Models.TextspellCheckResponseModel.g.cs | 32 +- ...nAI.Models.TextsummarizeResponseModel.g.cs | 84 +- ...dels.TexttopicExtractionResponseModel.g.cs | 24 +- ...tionautomaticTranslationResponseModel.g.cs | 56 +- ...slationlanguageDetectionResponseModel.g.cs | 44 +- ...enAI.Models.VideogenerationAsyncModel.g.cs | 38 +- ...I.Models.VideologoDetectionAsyncModel.g.cs | 18 +- ...I.Models.VideotextDetectionAsyncModel.g.cs | 18 +- ...rationClient.TextTextModerationCreate.g.cs | 22 +- .../Generated/EdenAI.ModerationClient.g.cs | 2 +- ....TextTextNamedEntityRecognitionCreate.g.cs | 22 +- 
.../EdenAI.NamedEntityRecognitionClient.g.cs | 2 +- ...lient.ImageImageObjectDetectionCreate.g.cs | 22 +- .../EdenAI.ObjectDetectionClient.g.cs | 2 +- ...t.VideoVideoObjectTrackingAsyncCreate.g.cs | 4 +- ...VideoVideoObjectTrackingAsyncRetrieve.g.cs | 4 +- ...ideoVideoObjectTrackingAsyncRetrieve2.g.cs | 20 +- .../EdenAI.ObjectTrackingAsyncClient.g.cs | 2 +- ...I.OcrAsyncClient.OcrOcrOcrAsyncCreate.g.cs | 4 +- ...OcrAsyncClient.OcrOcrOcrAsyncRetrieve.g.cs | 4 +- ...crAsyncClient.OcrOcrOcrAsyncRetrieve2.g.cs | 20 +- .../Generated/EdenAI.OcrAsyncClient.g.cs | 2 +- .../EdenAI.OcrClient.OcrOcrOcrCreate.g.cs | 22 +- .../EdenAI/Generated/EdenAI.OcrClient.g.cs | 2 +- ...syncClient.OcrOcrOcrTablesAsyncCreate.g.cs | 4 +- ...ncClient.OcrOcrOcrTablesAsyncRetrieve.g.cs | 4 +- ...cClient.OcrOcrOcrTablesAsyncRetrieve2.g.cs | 20 +- .../EdenAI.OcrTablesAsyncClient.g.cs | 2 +- ...t.VideoVideoPersonTrackingAsyncCreate.g.cs | 4 +- ...VideoVideoPersonTrackingAsyncRetrieve.g.cs | 4 +- ...ideoVideoPersonTrackingAsyncRetrieve2.g.cs | 20 +- .../EdenAI.PersonTrackingAsyncClient.g.cs | 2 +- ...nClient.TextTextPlagiaDetectionCreate.g.cs | 22 +- .../EdenAI.PlagiaDetectionClient.g.cs | 2 +- .../EdenAI/Generated/EdenAI.Polyfills.g.cs | 8 + ...ient.TextTextPromptOptimizationCreate.g.cs | 22 +- .../EdenAI.PromptOptimizationClient.g.cs | 2 +- ...AI.PromptsClient.PromptsPromptsCreate.g.cs | 6 +- ...I.PromptsClient.PromptsPromptsCreate2.g.cs | 6 +- ...ptsClient.PromptsPromptsHistoryCreate.g.cs | 6 +- ...omptsClient.PromptsPromptsHistoryList.g.cs | 4 +- ...nt.PromptsPromptsHistoryPartialUpdate.g.cs | 6 +- ...sClient.PromptsPromptsHistoryRetrieve.g.cs | 4 +- ...ptsClient.PromptsPromptsHistoryUpdate.g.cs | 6 +- ...enAI.PromptsClient.PromptsPromptsList.g.cs | 4 +- ...ptsClient.PromptsPromptsPartialUpdate.g.cs | 6 +- ....PromptsClient.PromptsPromptsRetrieve.g.cs | 4 +- ...AI.PromptsClient.PromptsPromptsUpdate.g.cs | 6 +- .../Generated/EdenAI.PromptsClient.g.cs | 2 +- 
...t.VideoVideoQuestionAnswerAsyncCreate.g.cs | 4 +- ...VideoVideoQuestionAnswerAsyncRetrieve.g.cs | 4 +- ...ideoVideoQuestionAnswerAsyncRetrieve2.g.cs | 20 +- .../EdenAI.QuestionAnswerAsyncClient.g.cs | 2 +- ...Client.ImageImageQuestionAnswerCreate.g.cs | 22 +- ...Client.VideoVideoQuestionAnswerCreate.g.cs | 22 +- .../EdenAI.QuestionAnswerClient.g.cs | 2 +- ...sClient.ResourcesResourcesAssetCreate.g.cs | 6 +- ....ResourcesResourcesAssetPartialUpdate.g.cs | 6 +- ...lient.ResourcesResourcesAssetRetrieve.g.cs | 4 +- ...sClient.ResourcesResourcesAssetUpdate.g.cs | 6 +- ...ourcesClient.ResourcesResourcesCreate.g.cs | 6 +- ...esourcesClient.ResourcesResourcesList.g.cs | 4 +- ...lient.ResourcesResourcesPartialUpdate.g.cs | 6 +- ...rcesClient.ResourcesResourcesRetrieve.g.cs | 4 +- ...ourcesClient.ResourcesResourcesUpdate.g.cs | 6 +- .../Generated/EdenAI.ResourcesClient.g.cs | 2 +- ...ParserClient.OcrOcrResumeParserCreate.g.cs | 22 +- .../Generated/EdenAI.ResumeParserClient.g.cs | 2 +- ...ent.ImageImageSearchDeleteImageCreate.g.cs | 22 +- ...ient.ImageImageSearchGetImageRetrieve.g.cs | 20 +- ...ent.ImageImageSearchGetImagesRetrieve.g.cs | 20 +- ...mageImageSearchLaunchSimilarityCreate.g.cs | 22 +- ...ent.ImageImageSearchUploadImageCreate.g.cs | 22 +- .../EdenAI/Generated/EdenAI.SearchClient.g.cs | 2 +- ...lient.TextTextSentimentAnalysisCreate.g.cs | 22 +- .../EdenAI.SentimentAnalysisClient.g.cs | 2 +- ...eoVideoShotChangeDetectionAsyncCreate.g.cs | 4 +- ...VideoShotChangeDetectionAsyncRetrieve.g.cs | 4 +- ...ideoShotChangeDetectionAsyncRetrieve2.g.cs | 20 +- ...EdenAI.ShotChangeDetectionAsyncClient.g.cs | 2 +- ...ent.AudioAudioSpeechToTextAsyncCreate.g.cs | 4 +- ...t.AudioAudioSpeechToTextAsyncRetrieve.g.cs | 4 +- ....AudioAudioSpeechToTextAsyncRetrieve2.g.cs | 20 +- .../EdenAI.SpeechToTextAsyncClient.g.cs | 2 +- ...lCheckClient.TextTextSpellCheckCreate.g.cs | 22 +- .../Generated/EdenAI.SpellCheckClient.g.cs | 2 +- ...mmarizeClient.TextTextSummarizeCreate.g.cs | 22 +- 
.../Generated/EdenAI.SummarizeClient.g.cs | 2 +- ...nt.VideoVideoTextDetectionAsyncCreate.g.cs | 4 +- ....VideoVideoTextDetectionAsyncRetrieve.g.cs | 4 +- ...VideoVideoTextDetectionAsyncRetrieve2.g.cs | 20 +- .../EdenAI.TextDetectionAsyncClient.g.cs | 2 +- ...ent.AudioAudioTextToSpeechAsyncCreate.g.cs | 4 +- ...t.AudioAudioTextToSpeechAsyncRetrieve.g.cs | 4 +- ....AudioAudioTextToSpeechAsyncRetrieve2.g.cs | 20 +- .../EdenAI.TextToSpeechAsyncClient.g.cs | 2 +- ...chClient.AudioAudioTextToSpeechCreate.g.cs | 22 +- .../Generated/EdenAI.TextToSpeechClient.g.cs | 2 +- ...nClient.TextTextTopicExtractionCreate.g.cs | 22 +- .../EdenAI.TopicExtractionClient.g.cs | 2 +- ...ementClient.UserUserCustomTokenCreate.g.cs | 6 +- ...agementClient.UserUserCustomTokenList.g.cs | 4 +- ...ient.UserUserCustomTokenPartialUpdate.g.cs | 6 +- ...entClient.UserUserCustomTokenRetrieve.g.cs | 4 +- .../EdenAI.UserManagementClient.g.cs | 2 +- ....WorkflowWorkflowWebhookPartialUpdate.g.cs | 6 +- ...kClient.WorkflowWorkflowWebhookUpdate.g.cs | 6 +- .../Generated/EdenAI.WebhookClient.g.cs | 2 +- ....WorkflowsClient.WorkflowWorkflowList.g.cs | 4 +- ...kflowsClient.WorkflowWorkflowRetrieve.g.cs | 4 +- .../Generated/EdenAI.WorkflowsClient.g.cs | 2 +- src/libs/EdenAI/openapi.json | 2 +- 367 files changed, 5872 insertions(+), 2394 deletions(-) create mode 100644 src/libs/EdenAI/Generated/EdenAI.JsonSerializerContext.g.cs create mode 100644 src/libs/EdenAI/Generated/EdenAI.JsonSerializerContextTypes.g.cs diff --git a/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.ImageImageAiDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.ImageImageAiDetectionCreate.g.cs index fc632e3..a881f59 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.ImageImageAiDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.ImageImageAiDetectionCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( 
__httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageaiDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageaiDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessImageImageAiDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImageaiDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageaiDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.TextTextAiDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.TextTextAiDetectionCreate.g.cs index 7ad4e51..7340794 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.TextTextAiDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.TextTextAiDetectionCreate.g.cs @@ -88,7 +88,7 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -125,13 +125,13 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - 
__value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextaiDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextaiDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessTextTextAiDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextaiDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextaiDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.g.cs index 53d1584..013234d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AiDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class AiDetectionClient : global::EdenAI.IAiDetectionClien /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncCreate.g.cs index fe80b8f..06983e3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncCreate.g.cs @@ -205,7 +205,7 @@ partial void ProcessOcrOcrAnonymizationAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -235,7 +235,7 @@ partial void ProcessOcrOcrAnonymizationAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve.g.cs index e04517a..1b05764 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve2.g.cs index e035020..864f50f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.OcrOcrAnonymizationAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch 
(global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncocranonymizationAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncocranonymizationAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessOcrOcrAnonymizationAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncocranonymizationAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncocranonymizationAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.g.cs index 1ea1e69..7681c5c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class AnonymizationAsyncClient : global::EdenAI.IAnonymiza /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.ImageImageAnonymizationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.ImageImageAnonymizationCreate.g.cs index 4eaa61b..0efc738 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.ImageImageAnonymizationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.ImageImageAnonymizationCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var 
__httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageanonymizationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageanonymizationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessImageImageAnonymizationCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImageanonymizationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ImageanonymizationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.TextTextAnonymizationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.TextTextAnonymizationCreate.g.cs index 6744e8b..18b1115 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.TextTextAnonymizationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.TextTextAnonymizationCreate.g.cs @@ -258,7 +258,7 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -295,13 +295,13 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -333,13 +333,13 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_500 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -371,13 +371,13 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -409,13 +409,13 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch 
(global::System.Exception __ex) @@ -459,7 +459,7 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextanonymizationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextanonymizationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -489,7 +489,7 @@ partial void ProcessTextTextAnonymizationCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextanonymizationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextanonymizationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.g.cs index 597f627..5646b51 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AnonymizationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class AnonymizationClient : global::EdenAI.IAnonymizationC /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.TranslationTranslationAutomaticTranslationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.TranslationTranslationAutomaticTranslationCreate.g.cs index e83af84..eb4d435 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.TranslationTranslationAutomaticTranslationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.TranslationTranslationAutomaticTranslationCreate.g.cs @@ -242,7 +242,7 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -279,13 +279,13 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -317,13 +317,13 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -355,13 +355,13 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -393,13 +393,13 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -443,7 +443,7 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont __response.EnsureSuccessStatusCode(); return - global::EdenAI.TranslationautomaticTranslationResponseModel.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.TranslationautomaticTranslationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -473,7 +473,7 @@ partial void ProcessTranslationTranslationAutomaticTranslationCreateResponseCont ).ConfigureAwait(false); return - await global::EdenAI.TranslationautomaticTranslationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TranslationautomaticTranslationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.g.cs index 9bba93c..affd6ee 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomaticTranslationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class AutomaticTranslationClient : global::EdenAI.IAutomat /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationCreateProjectCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationCreateProjectCreate.g.cs index 6c23942..4cea6c7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationCreateProjectCreate.g.cs +++ 
b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationCreateProjectCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessImageImageAutomlClassificationCreateProjectCreateResponseCon ).ConfigureAwait(false); return - await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationDeleteProjectCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationDeleteProjectCreate.g.cs index 281ecf3..56f9154 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationDeleteProjectCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationDeleteProjectCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); 
+ __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon if (ReadResponseAsString) 
{ __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessImageImageAutomlClassificationDeleteProjectCreateResponseCon ).ConfigureAwait(false); return - await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationListProjectsRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationListProjectsRetrieve.g.cs index ffd055c..8100898 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationListProjectsRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationListProjectsRetrieve.g.cs @@ -90,13 +90,13 @@ partial void ProcessImageImageAutomlClassificationListProjectsRetrieveResponseCo if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -128,13 +128,13 @@ partial void ProcessImageImageAutomlClassificationListProjectsRetrieveResponseCo if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + 
__value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -166,13 +166,13 @@ partial void ProcessImageImageAutomlClassificationListProjectsRetrieveResponseCo if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -204,13 +204,13 @@ partial void ProcessImageImageAutomlClassificationListProjectsRetrieveResponseCo if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -254,7 +254,7 @@ partial void ProcessImageImageAutomlClassificationListProjectsRetrieveResponseCo __response.EnsureSuccessStatusCode(); return - global::EdenAI.AutomlClassificationListProjectsResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AutomlClassificationListProjectsResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -284,7 +284,7 @@ partial void ProcessImageImageAutomlClassificationListProjectsRetrieveResponseCo ).ConfigureAwait(false); return - await global::EdenAI.AutomlClassificationListProjectsResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AutomlClassificationListProjectsResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncCreate.g.cs index 51345d1..bb7ce22 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncCreate.g.cs @@ -192,7 +192,7 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncCreateResponseCont __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -222,7 +222,7 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncCreateResponseCont ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve.g.cs index ce79de8..29351d4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieveResponseCo __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieveResponseCo ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve2.g.cs index f261a12..3be0ec7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationPredictAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieve2ResponseC if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieve2ResponseC if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + 
__value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieve2ResponseC if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieve2ResponseC if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieve2ResponseC __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessImageImageAutomlClassificationPredictAsyncRetrieve2ResponseC ).ConfigureAwait(false); return - await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncCreate.g.cs index acf5644..7d475fb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncCreate.g.cs @@ -172,7 +172,7 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncCreateResponseConten __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -202,7 +202,7 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncCreateResponseConten ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve.g.cs index 5643e31..312a42a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieveResponseCont __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieveResponseCont ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve2.g.cs index 32f4807..75c5007 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationTrainAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieve2ResponseCon if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieve2ResponseCon if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 
= global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieve2ResponseCon if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieve2ResponseCon if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieve2ResponseCon __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessImageImageAutomlClassificationTrainAsyncRetrieve2ResponseCon ).ConfigureAwait(false); return - await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncCreate.g.cs index 2d543de..16bc665 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncCreate.g.cs @@ -198,7 +198,7 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncCreateResponseC __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -228,7 +228,7 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncCreateResponseC ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve.g.cs index fe3d037..ac8253f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieveRespons __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieveRespons ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve2.g.cs index df726ab..3b0bf2f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.ImageImageAutomlClassificationUploadDataAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieve2Respon if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieve2Respon if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieve2Respon if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieve2Respon if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieve2Respon __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageautomlClassificationResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessImageImageAutomlClassificationUploadDataAsyncRetrieve2Respon ).ConfigureAwait(false); return - await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageautomlClassificationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.g.cs index f77fc97..435f17a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.AutomlClassificationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class AutomlClassificationClient : global::EdenAI.IAutomlC /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.ImageImageBackgroundRemovalCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.ImageImageBackgroundRemovalCreate.g.cs index e0fe5a6..feb8233 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.ImageImageBackgroundRemovalCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.ImageImageBackgroundRemovalCreate.g.cs @@ -77,7 +77,7 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( 
__httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -114,13 +114,13 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -152,13 +152,13 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -190,13 +190,13 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -228,13 +228,13 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -278,7 +278,7 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagebackgroundRemovalResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagebackgroundRemovalResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -308,7 +308,7 @@ partial void ProcessImageImageBackgroundRemovalCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagebackgroundRemovalResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagebackgroundRemovalResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.g.cs index 2828c88..239a439 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BackgroundRemovalClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class BackgroundRemovalClient : global::EdenAI.IBackground /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.OcrOcrBankCheckParsingCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.OcrOcrBankCheckParsingCreate.g.cs index f6807d9..b4de7fb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.OcrOcrBankCheckParsingCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.OcrOcrBankCheckParsingCreate.g.cs @@ -75,7 +75,7 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var 
__httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -112,13 +112,13 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -150,13 +150,13 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -188,13 +188,13 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -226,13 +226,13 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -276,7 +276,7 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.OcrbankCheckParsingResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.OcrbankCheckParsingResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -306,7 +306,7 @@ partial void ProcessOcrOcrBankCheckParsingCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.OcrbankCheckParsingResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.OcrbankCheckParsingResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.g.cs index 94fd4d7..b30d870 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BankCheckParsingClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class BankCheckParsingClient : global::EdenAI.IBankCheckPa /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.BatchClient.BatchBatchList.g.cs b/src/libs/EdenAI/Generated/EdenAI.BatchClient.BatchBatchList.g.cs index 358d56b..f2e0841 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BatchClient.BatchBatchList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BatchClient.BatchBatchList.g.cs @@ -101,7 +101,7 @@ partial void ProcessBatchBatchListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -131,7 +131,7 @@ partial void ProcessBatchBatchListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchCreate.g.cs index 1e9faa2..cb4068b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchCreate.g.cs @@ -145,7 +145,7 @@ partial void ProcessFeatureBatchCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -185,13 +185,13 @@ partial void ProcessFeatureBatchCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - 
__value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessFeatureBatchCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -261,13 +261,13 @@ partial void ProcessFeatureBatchCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -299,13 +299,13 @@ partial void ProcessFeatureBatchCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, 
JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -349,7 +349,7 @@ partial void ProcessFeatureBatchCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.BatchLaunchResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.BatchLaunchResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -379,7 +379,7 @@ partial void ProcessFeatureBatchCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.BatchLaunchResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.BatchLaunchResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchRetrieve.g.cs index c063632..a23fb73 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BatchClient.FeatureBatchRetrieve.g.cs @@ -139,13 +139,13 @@ partial void ProcessFeatureBatchRetrieveResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -177,13 +177,13 @@ partial void ProcessFeatureBatchRetrieveResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -215,13 +215,13 @@ partial void ProcessFeatureBatchRetrieveResponseContent( if (ReadResponseAsString) { 
__content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -253,13 +253,13 @@ partial void ProcessFeatureBatchRetrieveResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessFeatureBatchRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PaginatedBatchResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PaginatedBatchResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -333,7 +333,7 @@ partial void ProcessFeatureBatchRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PaginatedBatchResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PaginatedBatchResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.BatchClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.BatchClient.g.cs index fde0613..d8d751a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.BatchClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.BatchClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class BatchClient : global::EdenAI.IBatchClient, global::S /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ChatClient.LlmLlmChatCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ChatClient.LlmLlmChatCreate.g.cs index f248c58..ab3455c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ChatClient.LlmLlmChatCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ChatClient.LlmLlmChatCreate.g.cs @@ -449,7 +449,7 @@ partial void ProcessLlmLlmChatCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( 
content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -486,13 +486,13 @@ partial void ProcessLlmLlmChatCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -524,13 +524,13 @@ partial void ProcessLlmLlmChatCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -562,13 +562,13 @@ partial void ProcessLlmLlmChatCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -600,13 +600,13 @@ partial void ProcessLlmLlmChatCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -650,7 +650,7 @@ partial void ProcessLlmLlmChatCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LlmchatResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LlmchatResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -680,7 +680,7 @@ partial void ProcessLlmLlmChatCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LlmchatResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LlmchatResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ChatClient.MultimodalMultimodalChatCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ChatClient.MultimodalMultimodalChatCreate.g.cs index 4a169c8..8a6f33a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ChatClient.MultimodalMultimodalChatCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ChatClient.MultimodalMultimodalChatCreate.g.cs @@ -123,7 +123,7 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -160,13 +160,13 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -198,13 +198,13 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + 
__value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -236,13 +236,13 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,13 +274,13 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -324,7 +324,7 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( __response.EnsureSuccessStatusCode(); 
return - global::EdenAI.MultimodalchatResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.MultimodalchatResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -354,7 +354,7 @@ partial void ProcessMultimodalMultimodalChatCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.MultimodalchatResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.MultimodalchatResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatCreate.g.cs index e55645d..20973ea 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatCreate.g.cs @@ -406,7 +406,7 @@ partial void ProcessTextTextChatCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -443,13 +443,13 @@ partial void ProcessTextTextChatCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, 
JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -481,13 +481,13 @@ partial void ProcessTextTextChatCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -519,13 +519,13 @@ partial void ProcessTextTextChatCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -557,13 +557,13 @@ partial void ProcessTextTextChatCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -607,7 +607,7 @@ partial void ProcessTextTextChatCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextchatResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextchatResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -637,7 +637,7 @@ partial void ProcessTextTextChatCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextchatResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextchatResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatStreamCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatStreamCreate.g.cs index 580640e..e28e439 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatStreamCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ChatClient.TextTextChatStreamCreate.g.cs @@ -69,7 +69,7 @@ partial void ProcessTextTextChatStreamCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, diff --git a/src/libs/EdenAI/Generated/EdenAI.ChatClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ChatClient.g.cs index 226b897..9d8517d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ChatClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ChatClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ChatClient : global::EdenAI.IChatClient, global::Sys /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.TextTextCodeGenerationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.TextTextCodeGenerationCreate.g.cs index 837d829..076ea32 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.TextTextCodeGenerationCreate.g.cs +++ 
b/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.TextTextCodeGenerationCreate.g.cs @@ -248,7 +248,7 @@ partial void ProcessTextTextCodeGenerationCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -285,13 +285,13 @@ partial void ProcessTextTextCodeGenerationCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -323,13 +323,13 @@ partial void ProcessTextTextCodeGenerationCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -361,13 +361,13 
@@ partial void ProcessTextTextCodeGenerationCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -399,13 +399,13 @@ partial void ProcessTextTextCodeGenerationCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -449,7 +449,7 @@ partial void ProcessTextTextCodeGenerationCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextcodeGenerationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextcodeGenerationResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -479,7 +479,7 @@ partial void ProcessTextTextCodeGenerationCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextcodeGenerationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextcodeGenerationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.g.cs index ab9a60e..ee5a691 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CodeGenerationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CodeGenerationClient : global::EdenAI.ICodeGeneratio /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementCostManagementRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementCostManagementRetrieve.g.cs index 7d76820..52b02bb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementCostManagementRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementCostManagementRetrieve.g.cs @@ -147,13 +147,13 @@ partial void ProcessCostManagementCostManagementRetrieveResponseContent( if (ReadResponseAsString) { __content_400 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessCostManagementCostManagementRetrieveResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessCostManagementCostManagementRetrieveResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch 
(global::System.Exception __ex) @@ -261,13 +261,13 @@ partial void ProcessCostManagementCostManagementRetrieveResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -311,7 +311,7 @@ partial void ProcessCostManagementCostManagementRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.CostMonitoringResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.CostMonitoringResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -341,7 +341,7 @@ partial void ProcessCostManagementCostManagementRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.CostMonitoringResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.CostMonitoringResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementDisplayNameOfYourSubfeature.g.cs b/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementDisplayNameOfYourSubfeature.g.cs index 7440221..3668e15 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementDisplayNameOfYourSubfeature.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.CostManagementDisplayNameOfYourSubfeature.g.cs @@ -102,7 +102,7 @@ partial void ProcessCostManagementDisplayNameOfYourSubfeatureResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.CreditsSerializer.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.CreditsSerializer.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -132,7 +132,7 @@ partial void ProcessCostManagementDisplayNameOfYourSubfeatureResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.CreditsSerializer.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.CreditsSerializer.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.g.cs index a8c4af4..c488e4f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CostMonitoringClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CostMonitoringClient : global::EdenAI.ICostMonitorin /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsCreate.g.cs index 95f41ad..c0e59c1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsCreateResponseCont __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsCreateResponseCont __response.EnsureSuccessStatusCode(); 
return - global::EdenAI.Conversation.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.Conversation.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsCreateResponseCont ).ConfigureAwait(false); return - await global::EdenAI.Conversation.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.Conversation.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2Create.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2Create.g.cs index ffdda41..c43d563 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2Create.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.AiproductsAiproductsAskyodaV2Create.g.cs @@ -77,7 +77,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -114,13 +114,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = 
global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -152,13 +152,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -190,13 +190,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -228,13 +228,13 @@ partial void 
ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -278,7 +278,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaCreateProjectResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaCreateProjectResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -308,7 +308,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2CreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.YodaCreateProjectResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaCreateProjectResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.g.cs index 5de6a20..69cde03 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotCreateOperationsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomChatbotCreateOperationsClient : global::EdenAI /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteAllChunksDestroy.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteAllChunksDestroy.g.cs index 6429862..d791fac 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteAllChunksDestroy.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteAllChunksDestroy.g.cs @@ -97,13 +97,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteAllChunksDestroyResponseC if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = 
global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -135,13 +135,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteAllChunksDestroyResponseC if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -173,13 +173,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteAllChunksDestroyResponseC if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -211,13 +211,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteAllChunksDestroyResponseC if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -261,7 +261,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteAllChunksDestroyResponseC __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaDeleteResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaDeleteResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -291,7 +291,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteAllChunksDestroyResponseC ).ConfigureAwait(false); return - await global::EdenAI.YodaDeleteResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaDeleteResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteChunkDestroy.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteChunkDestroy.g.cs index 0ac5464..1dc7341 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteChunkDestroy.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.AiproductsAiproductsAskyodaV2DeleteChunkDestroy.g.cs @@ -118,13 +118,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteChunkDestroyResponseConte if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -156,13 +156,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteChunkDestroyResponseConte if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -194,13 +194,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteChunkDestroyResponseConte if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -232,13 +232,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteChunkDestroyResponseConte if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -282,7 +282,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteChunkDestroyResponseConte __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaDeleteResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaDeleteResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -312,7 +312,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2DeleteChunkDestroyResponseConte ).ConfigureAwait(false); return - await global::EdenAI.YodaDeleteResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaDeleteResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.g.cs index f767205..000b2fd 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotDeleteOperationsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomChatbotDeleteOperationsClient : global::EdenAI /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsList.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsList.g.cs index 9c925c5..7e1c8be 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsList.g.cs @@ -107,7 +107,7 @@ partial void 
ProcessAiproductsAiproductsAskyodaV2ConversationsListResponseConten __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -137,7 +137,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsListResponseConten ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsRetrieve.g.cs index 88f711e..897fc22 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ConversationsRetrieve.g.cs @@ -113,7 +113,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsRetrieveResponseCo __response.EnsureSuccessStatusCode(); return - global::EdenAI.ConversationDetail.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.ConversationDetail.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -143,7 +143,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsRetrieveResponseCo ).ConfigureAwait(false); return - await global::EdenAI.ConversationDetail.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ConversationDetail.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesList.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesList.g.cs index e2da383..f57eecc 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesList.g.cs @@ -107,7 +107,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2FilesListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -137,7 +137,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2FilesListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesRetrieve.g.cs index 27d830a..d40972a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2FilesRetrieve.g.cs @@ -113,7 +113,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2FilesRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AiProductFile.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AiProductFile.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -143,7 +143,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2FilesRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AiProductFile.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.AiProductFile.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2InfoRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2InfoRetrieve.g.cs index f502747..09b3124 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2InfoRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2InfoRetrieve.g.cs @@ -97,13 +97,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2InfoRetrieveResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -135,13 +135,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2InfoRetrieveResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -173,13 +173,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2InfoRetrieveResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -211,13 +211,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2InfoRetrieveResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -261,7 +261,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2InfoRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaInfoResponse.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.YodaInfoResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -291,7 +291,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2InfoRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.YodaInfoResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaInfoResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ListChunksIdsCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ListChunksIdsCreate.g.cs index 30af707..50e384f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ListChunksIdsCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsAskyodaV2ListChunksIdsCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -111,13 +111,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = 
global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -149,13 +149,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -187,13 +187,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -225,13 +225,13 @@ partial void 
ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -275,7 +275,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaListChunksIdsResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaListChunksIdsResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -305,7 +305,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ListChunksIdsCreateResponseCont ).ConfigureAwait(false); return - await global::EdenAI.YodaListChunksIdsResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaListChunksIdsResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsList.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsList.g.cs index 9db79cd..49949de 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsList.g.cs @@ -110,7 +110,7 @@ partial void ProcessAiproductsAiproductsListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -140,7 +140,7 @@ partial void ProcessAiproductsAiproductsListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsRetrieve.g.cs index f5352e2..93601c3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.AiproductsAiproductsRetrieve.g.cs @@ -140,7 +140,7 @@ partial void ProcessAiproductsAiproductsRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AIProject.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AIProject.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -170,7 +170,7 @@ partial void ProcessAiproductsAiproductsRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AIProject.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AIProject.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.g.cs index f86618b..df016ed 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotInfoOperationsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomChatbotInfoOperationsClient : global::EdenAI.I /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2AskLlmCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2AskLlmCreate.g.cs index 2922f65..7eff044 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2AskLlmCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2AskLlmCreate.g.cs @@ -75,7 +75,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -113,13 +113,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( if 
(ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -151,13 +151,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -189,13 +189,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -227,13 +227,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -277,7 +277,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaQueryResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaQueryResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -307,7 +307,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AskLlmCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.YodaQueryResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaQueryResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2GenerateCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2GenerateCreate.g.cs index 4f18980..09ea1d4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2GenerateCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2GenerateCreate.g.cs @@ -74,7 +74,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -112,13 +112,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -150,13 +150,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( if (ReadResponseAsString) { 
__content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -188,13 +188,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -226,13 +226,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -276,7 +276,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaQueryResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaQueryResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -306,7 +306,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2GenerateCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.YodaQueryResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaQueryResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2QueryCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2QueryCreate.g.cs index a11c15a..0a971ac 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2QueryCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.AiproductsAiproductsAskyodaV2QueryCreate.g.cs @@ -75,7 +75,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: 
__httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -113,13 +113,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -151,13 +151,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -189,13 +189,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -227,13 +227,13 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -277,7 +277,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.YodaAskLlmResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.YodaAskLlmResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -307,7 +307,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2QueryCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.YodaAskLlmResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.YodaAskLlmResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.g.cs index a8fd6d0..bd44975 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotQueryOperationsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomChatbotQueryOperationsClient : global::EdenAI. /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsPartialUpdate.g.cs index d6d7b62..3802492 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsPartialUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsPartialUpdateRespo __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -128,7 +128,7 @@ partial void 
ProcessAiproductsAiproductsAskyodaV2ConversationsPartialUpdateRespo __response.EnsureSuccessStatusCode(); return - global::EdenAI.ConversationDetail.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ConversationDetail.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -158,7 +158,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsPartialUpdateRespo ).ConfigureAwait(false); return - await global::EdenAI.ConversationDetail.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ConversationDetail.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsUpdate.g.cs index 4dcd6f1..a0315bb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2ConversationsUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsUpdateResponseCont __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -128,7 +128,7 @@ partial void 
ProcessAiproductsAiproductsAskyodaV2ConversationsUpdateResponseCont __response.EnsureSuccessStatusCode(); return - global::EdenAI.ConversationDetail.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ConversationDetail.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -158,7 +158,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2ConversationsUpdateResponseCont ).ConfigureAwait(false); return - await global::EdenAI.ConversationDetail.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ConversationDetail.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2UpdateProjectPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2UpdateProjectPartialUpdate.g.cs index 4c8898b..826ac9c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2UpdateProjectPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.AiproductsAiproductsAskyodaV2UpdateProjectPartialUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2UpdateProjectPartialUpdateRespo __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -127,7 +127,7 @@ 
partial void ProcessAiproductsAiproductsAskyodaV2UpdateProjectPartialUpdateRespo __response.EnsureSuccessStatusCode(); return - global::EdenAI.AskYodaProjectUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AskYodaProjectUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -157,7 +157,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2UpdateProjectPartialUpdateRespo ).ConfigureAwait(false); return - await global::EdenAI.AskYodaProjectUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AskYodaProjectUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.g.cs index 036d6ce..2521fcc 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUpdateOperationsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomChatbotUpdateOperationsClient : global::EdenAI /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddFileCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddFileCreate.g.cs index 3428222..f3e2d00 
100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddFileCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddFileCreate.g.cs @@ -87,7 +87,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AddFileCreateResponse( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddImageCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddImageCreate.g.cs index 444e25b..57eaf42 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddImageCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddImageCreate.g.cs @@ -68,7 +68,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AddImageCreateResponse( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddTextCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddTextCreate.g.cs index 8999d24..6e6e901 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddTextCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddTextCreate.g.cs @@ -69,7 +69,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AddTextCreateResponse( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddUrlCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddUrlCreate.g.cs index f36d681..571bc93 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddUrlCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddUrlCreate.g.cs @@ -69,7 +69,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AddUrlCreateResponse( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddVideoCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddVideoCreate.g.cs index dc2f7ea..0808fc1 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddVideoCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.AiproductsAiproductsAskyodaV2AddVideoCreate.g.cs @@ -68,7 +68,7 @@ partial void ProcessAiproductsAiproductsAskyodaV2AddVideoCreateResponse( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.g.cs index a67faeb..60558f2 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomChatbotUploadOperationsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomChatbotUploadOperationsClient : global::EdenAI /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncCreate.g.cs index 05d27ef..cbb7b3a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncCreate.g.cs @@ -217,7 +217,7 @@ 
partial void ProcessOcrOcrCustomDocumentParsingAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -247,7 +247,7 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve.g.cs index b383a0f..94e4733 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve2.g.cs index 76e4eae..6ce84f8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.OcrOcrCustomDocumentParsingAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void 
ProcessOcrOcrCustomDocumentParsingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncocrcustomDocumentParsingAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncocrcustomDocumentParsingAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessOcrOcrCustomDocumentParsingAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncocrcustomDocumentParsingAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncocrcustomDocumentParsingAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.g.cs index 124cb20..47ab370 100644 --- a/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.CustomDocumentParsingAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class CustomDocumentParsingAsyncClient : global::EdenAI.IC /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.OcrOcrDataExtractionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.OcrOcrDataExtractionCreate.g.cs index c505532..334bd1e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.OcrOcrDataExtractionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.OcrOcrDataExtractionCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = 
global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( if 
(ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.OcrdataExtractionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.OcrdataExtractionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessOcrOcrDataExtractionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.OcrdataExtractionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.OcrdataExtractionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.g.cs index 63fc751..af11932 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DataExtractionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class DataExtractionClient : global::EdenAI.IDataExtractio /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncCreate.g.cs index c2302ca..4ada9c8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncCreate.g.cs @@ -185,7 +185,7 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -215,7 +215,7 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve.g.cs index 4a93b49..4515136 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve2.g.cs index 7d92a01..01829b6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.VideoVideoDeepfakeDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideodeepfakeDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideodeepfakeDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoDeepfakeDetectionAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideodeepfakeDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideodeepfakeDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.g.cs index 33e907e..cafebe9 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class DeepfakeDetectionAsyncClient : global::EdenAI.IDeepf /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.ImageImageDeepfakeDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.ImageImageDeepfakeDetectionCreate.g.cs index 55fed66..b53356f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.ImageImageDeepfakeDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.ImageImageDeepfakeDetectionCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( 
__httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagedeepfakeDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagedeepfakeDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessImageImageDeepfakeDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagedeepfakeDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagedeepfakeDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.g.cs index b9e1fb1..30a0644 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DeepfakeDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class DeepfakeDetectionClient : global::EdenAI.IDeepfakeDe /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.TranslationTranslationDocumentTranslationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.TranslationTranslationDocumentTranslationCreate.g.cs index 87ad9bc..662b982 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.TranslationTranslationDocumentTranslationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.TranslationTranslationDocumentTranslationCreate.g.cs @@ -191,7 +191,7 @@ partial void 
ProcessTranslationTranslationDocumentTranslationCreateResponseConte __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -228,13 +228,13 @@ partial void ProcessTranslationTranslationDocumentTranslationCreateResponseConte if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -266,13 +266,13 @@ partial void ProcessTranslationTranslationDocumentTranslationCreateResponseConte if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -304,13 +304,13 @@ partial void 
ProcessTranslationTranslationDocumentTranslationCreateResponseConte if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -342,13 +342,13 @@ partial void ProcessTranslationTranslationDocumentTranslationCreateResponseConte if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -392,7 +392,7 @@ partial void ProcessTranslationTranslationDocumentTranslationCreateResponseConte __response.EnsureSuccessStatusCode(); return - global::EdenAI.TranslationdocumentTranslationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TranslationdocumentTranslationResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -422,7 +422,7 @@ partial void ProcessTranslationTranslationDocumentTranslationCreateResponseConte ).ConfigureAwait(false); return - await global::EdenAI.TranslationdocumentTranslationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TranslationdocumentTranslationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.g.cs index 50afac7..475a101 100644 --- a/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.DocumentTranslationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class DocumentTranslationClient : global::EdenAI.IDocument /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.EdenAIClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.EdenAIClient.g.cs index c2f7f3a..5c4326b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EdenAIClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EdenAIClient.g.cs @@ -34,85 +34,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: /// /// /// - #pragma warning disable CS0618 // Type or member is obsolete - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new 
global::System.Text.Json.JsonSerializerOptions - { - PropertyNameCaseInsensitive = true, - DefaultIgnoreCondition = global::System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, - Converters = - { - new global::EdenAI.JsonConverters.BlankEnumJsonConverter(), - new global::EdenAI.JsonConverters.BlankEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.CategoryTypeJsonConverter(), - new global::EdenAI.JsonConverters.CategoryTypeNullableJsonConverter(), - new global::EdenAI.JsonConverters.ChatRoleJsonConverter(), - new global::EdenAI.JsonConverters.ChatRoleNullableJsonConverter(), - new global::EdenAI.JsonConverters.DataTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.DataTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.DbProviderEnumJsonConverter(), - new global::EdenAI.JsonConverters.DbProviderEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.DetailTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.DetailTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.DocumentTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.DocumentTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.EmbeddingsProviderEnumJsonConverter(), - new global::EdenAI.JsonConverters.EmbeddingsProviderEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.EntitySentimentEnumJsonConverter(), - new global::EdenAI.JsonConverters.EntitySentimentEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.ExecutionContentCreateStatusEnumJsonConverter(), - new global::EdenAI.JsonConverters.ExecutionContentCreateStatusEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.FallbackTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.FallbackTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.FinalStatusEnumJsonConverter(), - new global::EdenAI.JsonConverters.FinalStatusEnumNullableJsonConverter(), - new 
global::EdenAI.JsonConverters.GeneralSentimentEnumJsonConverter(), - new global::EdenAI.JsonConverters.GeneralSentimentEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.ImageaiDetectionAiDetectionDataClassPredictionEnumJsonConverter(), - new global::EdenAI.JsonConverters.ImageaiDetectionAiDetectionDataClassPredictionEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.OptionEnumJsonConverter(), - new global::EdenAI.JsonConverters.OptionEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.PredictionB20EnumJsonConverter(), - new global::EdenAI.JsonConverters.PredictionB20EnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.PriceUnitTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.PriceUnitTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.ProjectTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.ProjectTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.ReasoningEffortEnumJsonConverter(), - new global::EdenAI.JsonConverters.ReasoningEffortEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.RepresentationEnumJsonConverter(), - new global::EdenAI.JsonConverters.RepresentationEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.SentimentEbfEnumJsonConverter(), - new global::EdenAI.JsonConverters.SentimentEbfEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.ServiceTierEnumJsonConverter(), - new global::EdenAI.JsonConverters.ServiceTierEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.StateEnumJsonConverter(), - new global::EdenAI.JsonConverters.StateEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.Status549EnumJsonConverter(), - new global::EdenAI.JsonConverters.Status549EnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.Status889EnumJsonConverter(), - new global::EdenAI.JsonConverters.Status889EnumNullableJsonConverter(), - new 
global::EdenAI.JsonConverters.SubCategoryTypeJsonConverter(), - new global::EdenAI.JsonConverters.SubCategoryTypeNullableJsonConverter(), - new global::EdenAI.JsonConverters.ThinkingTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.ThinkingTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.TokenTypeEnumJsonConverter(), - new global::EdenAI.JsonConverters.TokenTypeEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.ToolChoiceEnumJsonConverter(), - new global::EdenAI.JsonConverters.ToolChoiceEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.TypeDe8EnumJsonConverter(), - new global::EdenAI.JsonConverters.TypeDe8EnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.TypeOfDataEnumJsonConverter(), - new global::EdenAI.JsonConverters.TypeOfDataEnumNullableJsonConverter(), - new global::EdenAI.JsonConverters.FeatureBatchRetrieveStatusJsonConverter(), - new global::EdenAI.JsonConverters.FeatureBatchRetrieveStatusNullableJsonConverter(), - new global::EdenAI.JsonConverters.OneOfJsonConverter(), - new global::EdenAI.JsonConverters.OneOfJsonConverter(), - new global::EdenAI.JsonConverters.OneOfJsonConverter(), - new global::EdenAI.JsonConverters.OneOfJsonConverter(), - new global::EdenAI.JsonConverters.UnixTimestampJsonConverter(), - } - }; - #pragma warning restore CS0618 // Type or member is obsolete + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// @@ -121,7 +43,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public AiDetectionClient AiDetection => new AiDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -130,7 +52,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: 
public AnonymizationClient Anonymization => new AnonymizationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -139,7 +61,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public AnonymizationAsyncClient AnonymizationAsync => new AnonymizationAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -148,7 +70,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public AutomaticTranslationClient AutomaticTranslation => new AutomaticTranslationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -157,7 +79,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public AutomlClassificationClient AutomlClassification => new AutomlClassificationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -166,7 +88,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public BackgroundRemovalClient BackgroundRemoval => new BackgroundRemovalClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -175,7 +97,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public BankCheckParsingClient BankCheckParsing => new BankCheckParsingClient(HttpClient, authorizations: 
Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -184,7 +106,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public BatchClient Batch => new BatchClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -193,7 +115,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ChatClient Chat => new ChatClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -202,7 +124,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CodeGenerationClient CodeGeneration => new CodeGenerationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -211,7 +133,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CostMonitoringClient CostMonitoring => new CostMonitoringClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -220,7 +142,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomChatbotCreateOperationsClient CustomChatbotCreateOperations => new CustomChatbotCreateOperationsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ 
-229,7 +151,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomChatbotDeleteOperationsClient CustomChatbotDeleteOperations => new CustomChatbotDeleteOperationsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -238,7 +160,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomChatbotInfoOperationsClient CustomChatbotInfoOperations => new CustomChatbotInfoOperationsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -247,7 +169,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomChatbotQueryOperationsClient CustomChatbotQueryOperations => new CustomChatbotQueryOperationsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -256,7 +178,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomChatbotUpdateOperationsClient CustomChatbotUpdateOperations => new CustomChatbotUpdateOperationsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -265,7 +187,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomChatbotUploadOperationsClient CustomChatbotUploadOperations => new CustomChatbotUploadOperationsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + 
JsonSerializerContext = JsonSerializerContext, }; /// @@ -274,7 +196,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public CustomDocumentParsingAsyncClient CustomDocumentParsingAsync => new CustomDocumentParsingAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -283,7 +205,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public DataExtractionClient DataExtraction => new DataExtractionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -292,7 +214,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public DeepfakeDetectionClient DeepfakeDetection => new DeepfakeDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -301,7 +223,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public DeepfakeDetectionAsyncClient DeepfakeDetectionAsync => new DeepfakeDetectionAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -310,7 +232,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public DocumentTranslationClient DocumentTranslation => new DocumentTranslationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -319,7 +241,7 @@ 
public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public EmbeddingsClient Embeddings => new EmbeddingsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -328,7 +250,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public EmotionDetectionClient EmotionDetection => new EmotionDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -337,7 +259,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public EnabledFeaturesClient EnabledFeatures => new EnabledFeaturesClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -346,7 +268,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public EntitySentimentClient EntitySentiment => new EntitySentimentClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -355,7 +277,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ExecutionsClient Executions => new ExecutionsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -364,7 +286,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ExplicitContentClient ExplicitContent => new ExplicitContentClient(HttpClient, 
authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -373,7 +295,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ExplicitContentDetectionAsyncClient ExplicitContentDetectionAsync => new ExplicitContentDetectionAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -382,7 +304,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public FaceCompareClient FaceCompare => new FaceCompareClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -391,7 +313,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public FaceDetectionClient FaceDetection => new FaceDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -400,7 +322,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public FaceDetectionAsyncClient FaceDetectionAsync => new FaceDetectionAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -409,7 +331,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public FaceRecognitionClient FaceRecognition => new FaceRecognitionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = 
JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -418,7 +340,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public FinancialParserClient FinancialParser => new FinancialParserClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -427,7 +349,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public GenerationClient Generation => new GenerationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -436,7 +358,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public GenerationAsyncClient GenerationAsync => new GenerationAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -445,7 +367,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public IdentityParserClient IdentityParser => new IdentityParserClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -454,7 +376,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public InfosClient Infos => new InfosClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -463,7 +385,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public 
KeywordExtractionClient KeywordExtraction => new KeywordExtractionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -472,7 +394,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public LabelDetectionAsyncClient LabelDetectionAsync => new LabelDetectionAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -481,7 +403,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public LandmarkDetectionClient LandmarkDetection => new LandmarkDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -490,7 +412,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public LanguageDetectionClient LanguageDetection => new LanguageDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -499,7 +421,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public LogoDetectionClient LogoDetection => new LogoDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -508,7 +430,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public LogoDetectionAsyncClient LogoDetectionAsync => new LogoDetectionAsyncClient(HttpClient, authorizations: Authorizations) 
{ ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -517,7 +439,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ModerationClient Moderation => new ModerationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -526,7 +448,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public NamedEntityRecognitionClient NamedEntityRecognition => new NamedEntityRecognitionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -535,7 +457,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ObjectDetectionClient ObjectDetection => new ObjectDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -544,7 +466,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ObjectTrackingAsyncClient ObjectTrackingAsync => new ObjectTrackingAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -553,7 +475,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public OcrClient Ocr => new OcrClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -562,7 
+484,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public OcrAsyncClient OcrAsync => new OcrAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -571,7 +493,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public OcrTablesAsyncClient OcrTablesAsync => new OcrTablesAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -580,7 +502,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public PersonTrackingAsyncClient PersonTrackingAsync => new PersonTrackingAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -589,7 +511,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public PlagiaDetectionClient PlagiaDetection => new PlagiaDetectionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -598,7 +520,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public PromptOptimizationClient PromptOptimization => new PromptOptimizationClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -607,7 +529,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public PromptsClient Prompts => new 
PromptsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -616,7 +538,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public QuestionAnswerClient QuestionAnswer => new QuestionAnswerClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -625,7 +547,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public QuestionAnswerAsyncClient QuestionAnswerAsync => new QuestionAnswerAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -634,7 +556,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ResourcesClient Resources => new ResourcesClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -643,7 +565,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ResumeParserClient ResumeParser => new ResumeParserClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -652,7 +574,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public SearchClient Search => new SearchClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = 
JsonSerializerContext, }; /// @@ -661,7 +583,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public SentimentAnalysisClient SentimentAnalysis => new SentimentAnalysisClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -670,7 +592,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public ShotChangeDetectionAsyncClient ShotChangeDetectionAsync => new ShotChangeDetectionAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -679,7 +601,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public SpeechToTextAsyncClient SpeechToTextAsync => new SpeechToTextAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -688,7 +610,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public SpellCheckClient SpellCheck => new SpellCheckClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -697,7 +619,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public SummarizeClient Summarize => new SummarizeClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -706,7 +628,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public 
TextDetectionAsyncClient TextDetectionAsync => new TextDetectionAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -715,7 +637,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public TextToSpeechClient TextToSpeech => new TextToSpeechClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -724,7 +646,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public TextToSpeechAsyncClient TextToSpeechAsync => new TextToSpeechAsyncClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -733,7 +655,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public TopicExtractionClient TopicExtraction => new TopicExtractionClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -742,7 +664,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public UserManagementClient UserManagement => new UserManagementClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -751,7 +673,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public WebhookClient Webhook => new WebhookClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - 
JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// @@ -760,7 +682,7 @@ public sealed partial class EdenAIClient : global::EdenAI.IEdenAIClient, global: public WorkflowsClient Workflows => new WorkflowsClient(HttpClient, authorizations: Authorizations) { ReadResponseAsString = ReadResponseAsString, - JsonSerializerOptions = JsonSerializerOptions, + JsonSerializerContext = JsonSerializerContext, }; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.ImageImageEmbeddingsCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.ImageImageEmbeddingsCreate.g.cs index ab6a623..6cc2921 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.ImageImageEmbeddingsCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.ImageImageEmbeddingsCreate.g.cs @@ -82,7 +82,7 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -119,13 +119,13 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch 
(global::System.Exception __ex) @@ -157,13 +157,13 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -195,13 +195,13 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -233,13 +233,13 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -283,7 +283,7 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageembeddingsResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageembeddingsResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -313,7 +313,7 @@ partial void ProcessImageImageEmbeddingsCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImageembeddingsResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageembeddingsResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.TextTextEmbeddingsCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.TextTextEmbeddingsCreate.g.cs index 4438bce..52c7284 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.TextTextEmbeddingsCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.TextTextEmbeddingsCreate.g.cs @@ -105,7 +105,7 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -142,13 +142,13 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -180,13 +180,13 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -218,13 +218,13 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -256,13 +256,13 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -306,7 +306,7 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.TextembeddingsResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextembeddingsResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -336,7 +336,7 @@ partial void ProcessTextTextEmbeddingsCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextembeddingsResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextembeddingsResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.g.cs index a38b7f2..31761bc 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EmbeddingsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class EmbeddingsClient : global::EdenAI.IEmbeddingsClient, /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.TextTextEmotionDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.TextTextEmotionDetectionCreate.g.cs index 2ff83b6..ccf512e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.TextTextEmotionDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.TextTextEmotionDetectionCreate.g.cs @@ -72,7 +72,7 @@ partial void 
ProcessTextTextEmotionDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessTextTextEmotionDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessTextTextEmotionDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessTextTextEmotionDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = 
await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessTextTextEmotionDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessTextTextEmotionDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextemotionDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextemotionDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessTextTextEmotionDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextemotionDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextemotionDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.g.cs index fe1ed7e..54f110a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EmotionDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class EmotionDetectionClient : global::EdenAI.IEmotionDete /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.EnabledFeaturesClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.EnabledFeaturesClient.g.cs index 8ba0ba3..2d2d268 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EnabledFeaturesClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EnabledFeaturesClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class EnabledFeaturesClient : global::EdenAI.IEnabledFeatu /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public 
global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.TextTextEntitySentimentCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.TextTextEntitySentimentCreate.g.cs index 5d2c05c..8bd3253 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.TextTextEntitySentimentCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.TextTextEntitySentimentCreate.g.cs @@ -136,7 +136,7 @@ partial void ProcessTextTextEntitySentimentCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -173,13 +173,13 @@ partial void ProcessTextTextEntitySentimentCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -211,13 +211,13 @@ partial void ProcessTextTextEntitySentimentCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -249,13 +249,13 @@ partial void ProcessTextTextEntitySentimentCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -287,13 +287,13 @@ partial void ProcessTextTextEntitySentimentCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -337,7 +337,7 @@ partial void 
ProcessTextTextEntitySentimentCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextentitySentimentResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextentitySentimentResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -367,7 +367,7 @@ partial void ProcessTextTextEntitySentimentCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextentitySentimentResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextentitySentimentResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.g.cs index 278b7d8..d2e74bb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.EntitySentimentClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class EntitySentimentClient : global::EdenAI.IEntitySentim /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionCreate.g.cs index 3141aac..ec7f4a3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionCreate.g.cs +++ 
b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionCreate.g.cs @@ -77,7 +77,7 @@ partial void ProcessWorkflowWorkflowExecutionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -127,7 +127,7 @@ partial void ProcessWorkflowWorkflowExecutionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ExecutionExampleSuccessCreate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ExecutionExampleSuccessCreate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -157,7 +157,7 @@ partial void ProcessWorkflowWorkflowExecutionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ExecutionExampleSuccessCreate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ExecutionExampleSuccessCreate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionList.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionList.g.cs index fd7b4f7..bd5a09c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionList.g.cs @@ -123,7 +123,7 @@ partial void ProcessWorkflowWorkflowExecutionListResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PaginatedExecutionListList.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PaginatedExecutionListList.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -153,7 +153,7 @@ partial void ProcessWorkflowWorkflowExecutionListResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PaginatedExecutionListList.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PaginatedExecutionListList.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionRetrieve.g.cs index 2a89355..d775200 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.WorkflowWorkflowExecutionRetrieve.g.cs @@ -114,7 +114,7 @@ partial void ProcessWorkflowWorkflowExecutionRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ExecutionExampleSuccessCreate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ExecutionExampleSuccessCreate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -144,7 +144,7 @@ partial void ProcessWorkflowWorkflowExecutionRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ExecutionExampleSuccessCreate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ExecutionExampleSuccessCreate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.g.cs index c79c4a4..8f6df1b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExecutionsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ExecutionsClient : global::EdenAI.IExecutionsClient, /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.ImageImageExplicitContentCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.ImageImageExplicitContentCreate.g.cs index 6775c07..3f9e294 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.ImageImageExplicitContentCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.ImageImageExplicitContentCreate.g.cs @@ -84,7 +84,7 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -121,13 +121,13 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, 
JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -159,13 +159,13 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -197,13 +197,13 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -235,13 +235,13 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( if (ReadResponseAsString) { __content_404 
= await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -285,7 +285,7 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageexplicitContentResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageexplicitContentResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -315,7 +315,7 @@ partial void ProcessImageImageExplicitContentCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImageexplicitContentResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImageexplicitContentResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.g.cs index e99b10c..e65f365 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ExplicitContentClient : global::EdenAI.IExplicitCont /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncCreate.g.cs index 56f89a0..3eafe6e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncCreate.g.cs @@ -190,7 +190,7 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncCreateResponseContent __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -220,7 +220,7 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncCreateResponseContent ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve.g.cs index c68db7d..189b7f0 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieveResponseConte __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieveResponseConte ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve2.g.cs index 8431688..16ae818 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.VideoVideoExplicitContentDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieve2ResponseCont if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieve2ResponseCont if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieve2ResponseCont if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieve2ResponseCont if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieve2ResponseCont __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.AsyncvideoexplicitContentDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideoexplicitContentDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoExplicitContentDetectionAsyncRetrieve2ResponseCont ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideoexplicitContentDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideoexplicitContentDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.g.cs index 868ffb0..3c85cb6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ExplicitContentDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ExplicitContentDetectionAsyncClient : global::EdenAI /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.ImageImageFaceCompareCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.ImageImageFaceCompareCreate.g.cs index bd4cdc8..5c1a94e 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.ImageImageFaceCompareCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.ImageImageFaceCompareCreate.g.cs @@ -74,7 +74,7 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -111,13 +111,13 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -149,13 +149,13 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -187,13 +187,13 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -225,13 +225,13 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -275,7 +275,7 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagefaceCompareResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagefaceCompareResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -305,7 +305,7 @@ partial void ProcessImageImageFaceCompareCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagefaceCompareResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagefaceCompareResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.g.cs index 09068de..4f85253 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceCompareClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class FaceCompareClient : global::EdenAI.IFaceCompareClien /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncCreate.g.cs index 1d30876..e1fcf7c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncCreate.g.cs @@ -190,7 +190,7 @@ partial void ProcessVideoVideoFaceDetectionAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -220,7 +220,7 @@ partial void ProcessVideoVideoFaceDetectionAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve.g.cs index 5d3066a..061c48d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve2.g.cs index c6e4a36..5fd6eb9 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.VideoVideoFaceDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideofaceDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.AsyncvideofaceDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoFaceDetectionAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideofaceDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideofaceDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.g.cs index fc45eeb..27fbf0e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class FaceDetectionAsyncClient : global::EdenAI.IFaceDetec /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.ImageImageFaceDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.ImageImageFaceDetectionCreate.g.cs index bc9bb05..915391f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.ImageImageFaceDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.ImageImageFaceDetectionCreate.g.cs @@ -75,7 +75,7 @@ partial void 
ProcessImageImageFaceDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -112,13 +112,13 @@ partial void ProcessImageImageFaceDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -150,13 +150,13 @@ partial void ProcessImageImageFaceDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -188,13 +188,13 @@ partial void ProcessImageImageFaceDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -226,13 +226,13 @@ partial void ProcessImageImageFaceDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -276,7 +276,7 @@ partial void ProcessImageImageFaceDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagefaceDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagefaceDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -306,7 +306,7 @@ partial void ProcessImageImageFaceDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagefaceDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagefaceDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.g.cs index 8d749d4..bdf9273 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class FaceDetectionClient : global::EdenAI.IFaceDetectionC /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionAddFaceCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionAddFaceCreate.g.cs index e9669a8..8065c52 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionAddFaceCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionAddFaceCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var 
__httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageFaceRecognitionAddFaceCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionDeleteFaceCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionDeleteFaceCreate.g.cs index f8745ac..e021fa8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionDeleteFaceCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionDeleteFaceCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void 
ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageFaceRecognitionDeleteFaceCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionListFacesRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionListFacesRetrieve.g.cs index e482908..a6408d1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionListFacesRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionListFacesRetrieve.g.cs @@ -158,13 +158,13 @@ partial void ProcessImageImageFaceRecognitionListFacesRetrieveResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -196,13 +196,13 @@ partial void ProcessImageImageFaceRecognitionListFacesRetrieveResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -234,13 +234,13 @@ partial void ProcessImageImageFaceRecognitionListFacesRetrieveResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -272,13 +272,13 @@ partial void ProcessImageImageFaceRecognitionListFacesRetrieveResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -322,7 +322,7 @@ partial void ProcessImageImageFaceRecognitionListFacesRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -352,7 +352,7 @@ partial void ProcessImageImageFaceRecognitionListFacesRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionRecognizeCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionRecognizeCreate.g.cs index 7dc173a..4ec86c3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionRecognizeCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.ImageImageFaceRecognitionRecognizeCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, 
JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( if (ReadResponseAsString) { __content_404 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagefaceRecognitionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageFaceRecognitionRecognizeCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagefaceRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.g.cs index f26e1e9..9e0db78 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FaceRecognitionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class FaceRecognitionClient : global::EdenAI.IFaceRecognit /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.OcrOcrFinancialParserCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.OcrOcrFinancialParserCreate.g.cs index 60a2654..030d7c6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.OcrOcrFinancialParserCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.OcrOcrFinancialParserCreate.g.cs @@ -225,7 +225,7 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -262,13 +262,13 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, 
JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -300,13 +300,13 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -338,13 +338,13 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -376,13 +376,13 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( if (ReadResponseAsString) { __content_404 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -426,7 +426,7 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.OcrfinancialParserResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.OcrfinancialParserResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -456,7 +456,7 @@ partial void ProcessOcrOcrFinancialParserCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.OcrfinancialParserResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.OcrfinancialParserResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.g.cs index 82e17a3..04b0317 100644 --- a/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.FinancialParserClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class FinancialParserClient : global::EdenAI.IFinancialPar /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncCreate.g.cs index c4e2cae..b9dd1e3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncCreate.g.cs @@ -239,7 +239,7 @@ partial void ProcessVideoVideoGenerationAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -269,7 +269,7 @@ partial void ProcessVideoVideoGenerationAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve.g.cs index ad9e1e8..7f69fbd 100644 --- a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoGenerationAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoGenerationAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve2.g.cs index f001fa2..6e16bee 100644 --- a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.VideoVideoGenerationAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoGenerationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoGenerationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch 
(global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoGenerationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoGenerationAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoGenerationAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideogenerationAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideogenerationAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoGenerationAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideogenerationAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideogenerationAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.g.cs index 44265f0..d598301 100644 --- a/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.GenerationAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class GenerationAsyncClient : global::EdenAI.IGenerationAs /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.GenerationClient.ImageImageGenerationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.GenerationClient.ImageImageGenerationCreate.g.cs index 2f78e34..86fe858 100644 --- a/src/libs/EdenAI/Generated/EdenAI.GenerationClient.ImageImageGenerationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.GenerationClient.ImageImageGenerationCreate.g.cs @@ -137,7 +137,7 @@ partial void ProcessImageImageGenerationCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = 
request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -174,13 +174,13 @@ partial void ProcessImageImageGenerationCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -212,13 +212,13 @@ partial void ProcessImageImageGenerationCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -250,13 +250,13 @@ partial void ProcessImageImageGenerationCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + 
__value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -288,13 +288,13 @@ partial void ProcessImageImageGenerationCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -338,7 +338,7 @@ partial void ProcessImageImageGenerationCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagegenerationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagegenerationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -368,7 +368,7 @@ partial void ProcessImageImageGenerationCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagegenerationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagegenerationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.GenerationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.GenerationClient.g.cs index edea3d3..1ce5af1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.GenerationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.GenerationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class GenerationClient : global::EdenAI.IGenerationClient, /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.IAiDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IAiDetectionClient.g.cs index 143e20f..0548413 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IAiDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IAiDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IAiDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IAnonymizationAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IAnonymizationAsyncClient.g.cs index d449a76..0ad4c7c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IAnonymizationAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IAnonymizationAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IAnonymizationAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; 
set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IAnonymizationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IAnonymizationClient.g.cs index cdace75..91bcfe5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IAnonymizationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IAnonymizationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IAnonymizationClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IAutomaticTranslationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IAutomaticTranslationClient.g.cs index 3384699..a6c9691 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IAutomaticTranslationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IAutomaticTranslationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IAutomaticTranslationClient : global::System.IDisposabl /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IAutomlClassificationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IAutomlClassificationClient.g.cs index 6637402..bca9102 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IAutomlClassificationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IAutomlClassificationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IAutomlClassificationClient : global::System.IDisposabl /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IBackgroundRemovalClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IBackgroundRemovalClient.g.cs index d39290e..b3988e1 100644 
--- a/src/libs/EdenAI/Generated/EdenAI.IBackgroundRemovalClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IBackgroundRemovalClient.g.cs @@ -36,7 +36,7 @@ public partial interface IBackgroundRemovalClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IBankCheckParsingClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IBankCheckParsingClient.g.cs index 449b660..3fe6947 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IBankCheckParsingClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IBankCheckParsingClient.g.cs @@ -36,7 +36,7 @@ public partial interface IBankCheckParsingClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IBatchClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IBatchClient.g.cs index 80c7e41..d0b9171 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IBatchClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IBatchClient.g.cs @@ -36,7 +36,7 @@ public partial interface IBatchClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IChatClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IChatClient.g.cs index a8e503e..8a0bb5d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IChatClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IChatClient.g.cs @@ -36,7 +36,7 @@ public partial interface IChatClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions 
{ get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICodeGenerationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICodeGenerationClient.g.cs index e79742f..7afeaf5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICodeGenerationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICodeGenerationClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICodeGenerationClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICostMonitoringClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICostMonitoringClient.g.cs index 3362f16..fd94a97 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICostMonitoringClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICostMonitoringClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICostMonitoringClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotCreateOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotCreateOperationsClient.g.cs index 6da3e4d..e8fe0fa 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotCreateOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotCreateOperationsClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomChatbotCreateOperationsClient : global::System.I /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git 
a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotDeleteOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotDeleteOperationsClient.g.cs index ff781be..a83423d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotDeleteOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotDeleteOperationsClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomChatbotDeleteOperationsClient : global::System.I /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotInfoOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotInfoOperationsClient.g.cs index 5a02510..d73cb54 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotInfoOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotInfoOperationsClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomChatbotInfoOperationsClient : global::System.IDi /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotQueryOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotQueryOperationsClient.g.cs index 966cf51..2553b88 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotQueryOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotQueryOperationsClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomChatbotQueryOperationsClient : global::System.ID /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git 
a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUpdateOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUpdateOperationsClient.g.cs index 50cbd8c..61d75c5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUpdateOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUpdateOperationsClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomChatbotUpdateOperationsClient : global::System.I /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUploadOperationsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUploadOperationsClient.g.cs index acd0fe4..05ad18d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUploadOperationsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomChatbotUploadOperationsClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomChatbotUploadOperationsClient : global::System.I /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ICustomDocumentParsingAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ICustomDocumentParsingAsyncClient.g.cs index ac9a79c..a1b02d6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ICustomDocumentParsingAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ICustomDocumentParsingAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface ICustomDocumentParsingAsyncClient : global::System.IDis /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git 
a/src/libs/EdenAI/Generated/EdenAI.IDataExtractionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IDataExtractionClient.g.cs index 3d3044a..15b79e1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IDataExtractionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IDataExtractionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IDataExtractionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionAsyncClient.g.cs index 5196b0a..ff5323f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IDeepfakeDetectionAsyncClient : global::System.IDisposa /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionClient.g.cs index 9f503b2..449ce9a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IDeepfakeDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IDeepfakeDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IDocumentTranslationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IDocumentTranslationClient.g.cs index 70f917f..890a4ac 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.IDocumentTranslationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IDocumentTranslationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IDocumentTranslationClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IEdenAIClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IEdenAIClient.g.cs index 04bbcff..02430cb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IEdenAIClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IEdenAIClient.g.cs @@ -37,7 +37,7 @@ public partial interface IEdenAIClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.IEmbeddingsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IEmbeddingsClient.g.cs index 19a75ec..a9b6e87 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IEmbeddingsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IEmbeddingsClient.g.cs @@ -36,7 +36,7 @@ public partial interface IEmbeddingsClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IEmotionDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IEmotionDetectionClient.g.cs index 1a59505..a7f7535 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IEmotionDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IEmotionDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IEmotionDetectionClient : global::System.IDisposable /// /// /// - 
global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IEnabledFeaturesClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IEnabledFeaturesClient.g.cs index a395cd2..49c1fe2 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IEnabledFeaturesClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IEnabledFeaturesClient.g.cs @@ -36,7 +36,7 @@ public partial interface IEnabledFeaturesClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IEntitySentimentClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IEntitySentimentClient.g.cs index 644beb8..f5e1dbb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IEntitySentimentClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IEntitySentimentClient.g.cs @@ -36,7 +36,7 @@ public partial interface IEntitySentimentClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IExecutionsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IExecutionsClient.g.cs index a408d07..e1b8d24 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IExecutionsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IExecutionsClient.g.cs @@ -36,7 +36,7 @@ public partial interface IExecutionsClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git 
a/src/libs/EdenAI/Generated/EdenAI.IExplicitContentClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IExplicitContentClient.g.cs index 742829c..15a3a1f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IExplicitContentClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IExplicitContentClient.g.cs @@ -36,7 +36,7 @@ public partial interface IExplicitContentClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IExplicitContentDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IExplicitContentDetectionAsyncClient.g.cs index e4d1a8c..297cb85 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IExplicitContentDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IExplicitContentDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IExplicitContentDetectionAsyncClient : global::System.I /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IFaceCompareClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IFaceCompareClient.g.cs index 5ec21d6..a71015f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IFaceCompareClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IFaceCompareClient.g.cs @@ -36,7 +36,7 @@ public partial interface IFaceCompareClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionAsyncClient.g.cs index d82295c..dcfb14c 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IFaceDetectionAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionClient.g.cs index 9b509ad..34900d7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IFaceDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IFaceDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IFaceRecognitionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IFaceRecognitionClient.g.cs index b797ecb..f6c7788 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IFaceRecognitionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IFaceRecognitionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IFaceRecognitionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IFinancialParserClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IFinancialParserClient.g.cs index 629ee46..a8dd799 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IFinancialParserClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IFinancialParserClient.g.cs @@ -36,7 +36,7 @@ public partial interface IFinancialParserClient : 
global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IGenerationAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IGenerationAsyncClient.g.cs index 3c70659..07be081 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IGenerationAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IGenerationAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IGenerationAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IGenerationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IGenerationClient.g.cs index fdb3329..d4cc3fa 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IGenerationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IGenerationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IGenerationClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IIdentityParserClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IIdentityParserClient.g.cs index 85e803c..01c6805 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IIdentityParserClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IIdentityParserClient.g.cs @@ -36,7 +36,7 @@ public partial interface IIdentityParserClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git 
a/src/libs/EdenAI/Generated/EdenAI.IInfosClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IInfosClient.g.cs index e14116d..6dc537d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IInfosClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IInfosClient.g.cs @@ -36,7 +36,7 @@ public partial interface IInfosClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IKeywordExtractionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IKeywordExtractionClient.g.cs index 88905f0..59908c3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IKeywordExtractionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IKeywordExtractionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IKeywordExtractionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ILabelDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ILabelDetectionAsyncClient.g.cs index 6c357a5..084962b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ILabelDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ILabelDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface ILabelDetectionAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ILandmarkDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ILandmarkDetectionClient.g.cs index 95c9a6d..ceab2bc 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ILandmarkDetectionClient.g.cs +++ 
b/src/libs/EdenAI/Generated/EdenAI.ILandmarkDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface ILandmarkDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ILanguageDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ILanguageDetectionClient.g.cs index f1d06f2..b4c6123 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ILanguageDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ILanguageDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface ILanguageDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionAsyncClient.g.cs index 1186e90..211806f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface ILogoDetectionAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionClient.g.cs index c18e8ac..e755f97 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ILogoDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface ILogoDetectionClient : global::System.IDisposable /// /// /// - 
global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IModerationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IModerationClient.g.cs index 1849bf7..81f699a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IModerationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IModerationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IModerationClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.INamedEntityRecognitionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.INamedEntityRecognitionClient.g.cs index b89dd4c..c0ca4f5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.INamedEntityRecognitionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.INamedEntityRecognitionClient.g.cs @@ -36,7 +36,7 @@ public partial interface INamedEntityRecognitionClient : global::System.IDisposa /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IObjectDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IObjectDetectionClient.g.cs index 573f06d..90c0441 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IObjectDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IObjectDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IObjectDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git 
a/src/libs/EdenAI/Generated/EdenAI.IObjectTrackingAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IObjectTrackingAsyncClient.g.cs index 27523ea..94e4235 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IObjectTrackingAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IObjectTrackingAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IObjectTrackingAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IOcrAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IOcrAsyncClient.g.cs index deb834b..8b7ebe0 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IOcrAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IOcrAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IOcrAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IOcrClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IOcrClient.g.cs index c5e0fb7..8412a4b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IOcrClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IOcrClient.g.cs @@ -36,7 +36,7 @@ public partial interface IOcrClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IOcrTablesAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IOcrTablesAsyncClient.g.cs index b705ca5..6f89c75 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IOcrTablesAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IOcrTablesAsyncClient.g.cs @@ 
-36,7 +36,7 @@ public partial interface IOcrTablesAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IPersonTrackingAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IPersonTrackingAsyncClient.g.cs index f539638..013a282 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IPersonTrackingAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IPersonTrackingAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IPersonTrackingAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IPlagiaDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IPlagiaDetectionClient.g.cs index 6ceaffd..e2a59d2 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IPlagiaDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IPlagiaDetectionClient.g.cs @@ -36,7 +36,7 @@ public partial interface IPlagiaDetectionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IPromptOptimizationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IPromptOptimizationClient.g.cs index 02ebcff..4e15d6b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IPromptOptimizationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IPromptOptimizationClient.g.cs @@ -36,7 +36,7 @@ public partial interface IPromptOptimizationClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; 
set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IPromptsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IPromptsClient.g.cs index d3dc35d..4100c0c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IPromptsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IPromptsClient.g.cs @@ -36,7 +36,7 @@ public partial interface IPromptsClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerAsyncClient.g.cs index b131486..489586d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IQuestionAnswerAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerClient.g.cs index d1cb19e..d88997e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IQuestionAnswerClient.g.cs @@ -36,7 +36,7 @@ public partial interface IQuestionAnswerClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IResourcesClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IResourcesClient.g.cs index 
4aea1bf..8af6e67 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IResourcesClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IResourcesClient.g.cs @@ -36,7 +36,7 @@ public partial interface IResourcesClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IResumeParserClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IResumeParserClient.g.cs index 30167e5..808a4ce 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IResumeParserClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IResumeParserClient.g.cs @@ -36,7 +36,7 @@ public partial interface IResumeParserClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ISearchClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ISearchClient.g.cs index 75ad486..90dd0c6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ISearchClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ISearchClient.g.cs @@ -36,7 +36,7 @@ public partial interface ISearchClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ISentimentAnalysisClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ISentimentAnalysisClient.g.cs index d19d64f..f188cdf 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ISentimentAnalysisClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ISentimentAnalysisClient.g.cs @@ -36,7 +36,7 @@ public partial interface ISentimentAnalysisClient : global::System.IDisposable /// /// /// - 
global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IShotChangeDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IShotChangeDetectionAsyncClient.g.cs index d6d668b..8df4222 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IShotChangeDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IShotChangeDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface IShotChangeDetectionAsyncClient : global::System.IDispo /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ISpeechToTextAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ISpeechToTextAsyncClient.g.cs index d450759..56ecdd1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ISpeechToTextAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ISpeechToTextAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface ISpeechToTextAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ISpellCheckClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ISpellCheckClient.g.cs index 06216d3..6ed9d5f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ISpellCheckClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ISpellCheckClient.g.cs @@ -36,7 +36,7 @@ public partial interface ISpellCheckClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff 
--git a/src/libs/EdenAI/Generated/EdenAI.ISummarizeClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ISummarizeClient.g.cs index d462494..d23576d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ISummarizeClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ISummarizeClient.g.cs @@ -36,7 +36,7 @@ public partial interface ISummarizeClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ITextDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ITextDetectionAsyncClient.g.cs index c96940d..ee2c348 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ITextDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ITextDetectionAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface ITextDetectionAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechAsyncClient.g.cs index 9d4394c..b151a5e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechAsyncClient.g.cs @@ -36,7 +36,7 @@ public partial interface ITextToSpeechAsyncClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechClient.g.cs index 61c7124..63d35d3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechClient.g.cs 
+++ b/src/libs/EdenAI/Generated/EdenAI.ITextToSpeechClient.g.cs @@ -36,7 +36,7 @@ public partial interface ITextToSpeechClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.ITopicExtractionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ITopicExtractionClient.g.cs index 5e381f9..ee217b8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ITopicExtractionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ITopicExtractionClient.g.cs @@ -36,7 +36,7 @@ public partial interface ITopicExtractionClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IUserManagementClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IUserManagementClient.g.cs index 18028ed..472a006 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IUserManagementClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IUserManagementClient.g.cs @@ -36,7 +36,7 @@ public partial interface IUserManagementClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IWebhookClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IWebhookClient.g.cs index 25e7748..eac8fda 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IWebhookClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IWebhookClient.g.cs @@ -36,7 +36,7 @@ public partial interface IWebhookClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + 
global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IWorkflowsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IWorkflowsClient.g.cs index 3e77e99..97efe49 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IWorkflowsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IWorkflowsClient.g.cs @@ -36,7 +36,7 @@ public partial interface IWorkflowsClient : global::System.IDisposable /// /// /// - global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } + global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } } diff --git a/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.OcrOcrIdentityParserCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.OcrOcrIdentityParserCreate.g.cs index ca1d54a..cfeeaac 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.OcrOcrIdentityParserCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.OcrOcrIdentityParserCreate.g.cs @@ -151,7 +151,7 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -188,13 +188,13 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -226,13 +226,13 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -264,13 +264,13 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -302,13 +302,13 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, 
JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -352,7 +352,7 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.OcridentityParserResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.OcridentityParserResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -382,7 +382,7 @@ partial void ProcessOcrOcrIdentityParserCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.OcridentityParserResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.OcridentityParserResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.g.cs index 688e50d..264ba66 100644 --- a/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.IdentityParserClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class IdentityParserClient : global::EdenAI.IIdentityParse /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.InfosClient.InfoInfoProviderSubfeaturesList.g.cs b/src/libs/EdenAI/Generated/EdenAI.InfosClient.InfoInfoProviderSubfeaturesList.g.cs index 5c0df86..a81fe0e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.InfosClient.InfoInfoProviderSubfeaturesList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.InfosClient.InfoInfoProviderSubfeaturesList.g.cs @@ -159,7 +159,7 @@ partial void ProcessInfoInfoProviderSubfeaturesListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -189,7 +189,7 @@ partial void ProcessInfoInfoProviderSubfeaturesListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.InfosClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.InfosClient.g.cs index b1ee07c..4d6b12d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.InfosClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.InfosClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class InfosClient : global::EdenAI.IInfosClient, global::S /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf2.g.cs b/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf2.g.cs index 3352e1c..539f652 100644 --- a/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf2.g.cs @@ -3,7 +3,7 @@ namespace EdenAI.JsonConverters { /// - public class OneOfJsonConverter : global::System.Text.Json.Serialization.JsonConverter> + public class 
OneOfJsonConverter<[global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMembers(global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMemberTypes.PublicProperties)] T1, [global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMembers(global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMemberTypes.PublicProperties)] T2> : global::System.Text.Json.Serialization.JsonConverter> { /// public override global::EdenAI.OneOf Read( @@ -11,7 +11,8 @@ public class OneOfJsonConverter : global::System.Text.Json.Serialization global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) { - options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + var typeInfoResolver = options.TypeInfoResolver ?? throw new global::System.InvalidOperationException("TypeInfoResolver is not set."); using var __jsonDocument = global::System.Text.Json.JsonDocument.ParseValue(ref reader); @@ -26,7 +27,27 @@ public class OneOfJsonConverter : global::System.Text.Json.Serialization } var __score0 = 0; + { + var __ti = typeInfoResolver.GetTypeInfo(typeof(T1), options); + if (__ti != null && __ti.Kind == global::System.Text.Json.Serialization.Metadata.JsonTypeInfoKind.Object) + { + foreach (var __prop in __ti.Properties) + { + if (__jsonProps.Contains(__prop.Name)) __score0++; + } + } + } var __score1 = 0; + { + var __ti = typeInfoResolver.GetTypeInfo(typeof(T2), options); + if (__ti != null && __ti.Kind == global::System.Text.Json.Serialization.Metadata.JsonTypeInfoKind.Object) + { + foreach (var __prop in __ti.Properties) + { + if (__jsonProps.Contains(__prop.Name)) __score1++; + } + } + } var __bestScore = 0; var __bestIndex = -1; if (__score0 > __bestScore) { __bestScore = __score0; __bestIndex = 0; } @@ -41,7 +62,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serialization try { - value1 = 
global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T1), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T1).Name}"); + value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -56,7 +79,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serialization try { - value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T2), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T2).Name}"); + value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -72,7 +97,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serialization try { - value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T1), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T1).Name}"); + value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -84,7 +111,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serialization try { - value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T2), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T2).Name}"); + value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -109,15 +138,20 @@ public override void Write( global::EdenAI.OneOf value, global::System.Text.Json.JsonSerializerOptions options) { - options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + var typeInfoResolver = options.TypeInfoResolver ?? throw new global::System.InvalidOperationException("TypeInfoResolver is not set."); if (value.IsValue1) { - global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value1, typeof(T1), options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T1), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T1).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value1!, typeInfo); } else if (value.IsValue2) { - global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value2, typeof(T2), options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T2), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T2).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value2!, typeInfo); } } } diff --git a/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf3.g.cs b/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf3.g.cs index 82a34c0..7b9aa17 100644 --- a/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf3.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.JsonConverters.OneOf3.g.cs @@ -3,7 +3,7 @@ namespace EdenAI.JsonConverters { /// - public class OneOfJsonConverter : global::System.Text.Json.Serialization.JsonConverter> + public class OneOfJsonConverter<[global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMembers(global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMemberTypes.PublicProperties)] T1, [global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMembers(global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMemberTypes.PublicProperties)] T2, [global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMembers(global::System.Diagnostics.CodeAnalysis.DynamicallyAccessedMemberTypes.PublicProperties)] T3> : global::System.Text.Json.Serialization.JsonConverter> { /// public override global::EdenAI.OneOf Read( @@ -11,7 +11,8 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa global::System.Type typeToConvert, global::System.Text.Json.JsonSerializerOptions options) { - options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + var typeInfoResolver = options.TypeInfoResolver ?? 
throw new global::System.InvalidOperationException("TypeInfoResolver is not set."); using var __jsonDocument = global::System.Text.Json.JsonDocument.ParseValue(ref reader); @@ -26,8 +27,38 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa } var __score0 = 0; + { + var __ti = typeInfoResolver.GetTypeInfo(typeof(T1), options); + if (__ti != null && __ti.Kind == global::System.Text.Json.Serialization.Metadata.JsonTypeInfoKind.Object) + { + foreach (var __prop in __ti.Properties) + { + if (__jsonProps.Contains(__prop.Name)) __score0++; + } + } + } var __score1 = 0; + { + var __ti = typeInfoResolver.GetTypeInfo(typeof(T2), options); + if (__ti != null && __ti.Kind == global::System.Text.Json.Serialization.Metadata.JsonTypeInfoKind.Object) + { + foreach (var __prop in __ti.Properties) + { + if (__jsonProps.Contains(__prop.Name)) __score1++; + } + } + } var __score2 = 0; + { + var __ti = typeInfoResolver.GetTypeInfo(typeof(T3), options); + if (__ti != null && __ti.Kind == global::System.Text.Json.Serialization.Metadata.JsonTypeInfoKind.Object) + { + foreach (var __prop in __ti.Properties) + { + if (__jsonProps.Contains(__prop.Name)) __score2++; + } + } + } var __bestScore = 0; var __bestIndex = -1; if (__score0 > __bestScore) { __bestScore = __score0; __bestIndex = 0; } @@ -44,7 +75,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa try { - value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T1), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T1).Name}"); + value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -59,7 +92,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa try { - value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T2), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T2).Name}"); + value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -74,7 +109,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa try { - value3 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T3), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T3).Name}"); + value3 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -90,7 +127,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa try { - value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T1), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T1).Name}"); + value1 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -102,7 +141,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa try { - value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T2), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T2).Name}"); + value2 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -114,7 +155,9 @@ public class OneOfJsonConverter : global::System.Text.Json.Serializa try { - value3 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T3), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T3).Name}"); + value3 = global::System.Text.Json.JsonSerializer.Deserialize(__rawJson, typeInfo); } catch (global::System.Text.Json.JsonException) { @@ -141,19 +184,26 @@ public override void Write( global::EdenAI.OneOf value, global::System.Text.Json.JsonSerializerOptions options) { - options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + options = options ?? throw new global::System.ArgumentNullException(nameof(options)); + var typeInfoResolver = options.TypeInfoResolver ?? 
throw new global::System.InvalidOperationException("TypeInfoResolver is not set."); if (value.IsValue1) { - global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value1, typeof(T1), options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T1), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T1).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value1!, typeInfo); } else if (value.IsValue2) { - global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value2, typeof(T2), options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T2), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? + throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T2).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value2!, typeInfo); } else if (value.IsValue3) { - global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value3, typeof(T3), options); + var typeInfo = typeInfoResolver.GetTypeInfo(typeof(T3), options) as global::System.Text.Json.Serialization.Metadata.JsonTypeInfo ?? 
+ throw new global::System.InvalidOperationException($"Cannot get type info for {typeof(T3).Name}"); + global::System.Text.Json.JsonSerializer.Serialize(writer, value.Value3!, typeInfo); } } } diff --git a/src/libs/EdenAI/Generated/EdenAI.JsonSerializerContext.g.cs b/src/libs/EdenAI/Generated/EdenAI.JsonSerializerContext.g.cs new file mode 100644 index 0000000..6973c28 --- /dev/null +++ b/src/libs/EdenAI/Generated/EdenAI.JsonSerializerContext.g.cs @@ -0,0 +1,814 @@ + +#nullable enable + +#pragma warning disable CS0618 // Type or member is obsolete +#pragma warning disable CS3016 // Arrays as attribute arguments is not CLS-compliant + +namespace EdenAI +{ + /// + /// + /// + [global::System.Text.Json.Serialization.JsonSourceGenerationOptions( + DefaultIgnoreCondition = global::System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, + Converters = new global::System.Type[] + { + typeof(global::EdenAI.JsonConverters.BlankEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.BlankEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.CategoryTypeJsonConverter), + + typeof(global::EdenAI.JsonConverters.CategoryTypeNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ChatRoleJsonConverter), + + typeof(global::EdenAI.JsonConverters.ChatRoleNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.DataTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.DataTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.DbProviderEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.DbProviderEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.DetailTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.DetailTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.DocumentTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.DocumentTypeEnumNullableJsonConverter), + + 
typeof(global::EdenAI.JsonConverters.EmbeddingsProviderEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.EmbeddingsProviderEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.EntitySentimentEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.EntitySentimentEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ExecutionContentCreateStatusEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ExecutionContentCreateStatusEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.FallbackTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.FallbackTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.FinalStatusEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.FinalStatusEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.GeneralSentimentEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.GeneralSentimentEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ImageaiDetectionAiDetectionDataClassPredictionEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ImageaiDetectionAiDetectionDataClassPredictionEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.OptionEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.OptionEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.PredictionB20EnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.PredictionB20EnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.PriceUnitTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.PriceUnitTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ProjectTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ProjectTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ReasoningEffortEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ReasoningEffortEnumNullableJsonConverter), + + 
typeof(global::EdenAI.JsonConverters.RepresentationEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.RepresentationEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.SentimentEbfEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.SentimentEbfEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ServiceTierEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ServiceTierEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.StateEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.StateEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.Status549EnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.Status549EnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.Status889EnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.Status889EnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.SubCategoryTypeJsonConverter), + + typeof(global::EdenAI.JsonConverters.SubCategoryTypeNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ThinkingTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ThinkingTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.TokenTypeEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.TokenTypeEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.ToolChoiceEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.ToolChoiceEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.TypeDe8EnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.TypeDe8EnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.TypeOfDataEnumJsonConverter), + + typeof(global::EdenAI.JsonConverters.TypeOfDataEnumNullableJsonConverter), + + typeof(global::EdenAI.JsonConverters.FeatureBatchRetrieveStatusJsonConverter), + + typeof(global::EdenAI.JsonConverters.FeatureBatchRetrieveStatusNullableJsonConverter), + + 
typeof(global::EdenAI.JsonConverters.OneOfJsonConverter), + + typeof(global::EdenAI.JsonConverters.OneOfJsonConverter), + + typeof(global::EdenAI.JsonConverters.OneOfJsonConverter), + + typeof(global::EdenAI.JsonConverters.OneOfJsonConverter), + + typeof(global::EdenAI.JsonConverters.UnixTimestampJsonConverter), + })] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.JsonSerializerContextTypes))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AIProject))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Guid))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(string))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ProjectTypeEnum), TypeInfoPropertyName = "ProjectTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.DateTime))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AddFileRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.DataTypeEnum), TypeInfoPropertyName = "DataTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(byte[]))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AddImageRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AddTextRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(object))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AddUrlRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(bool))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AddVideoRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AiDetectionItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(int))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AiProductFile))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OneOf), TypeInfoPropertyName = "OneOfStatus889EnumNullEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Status889Enum), TypeInfoPropertyName = "Status889Enum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.NullEnum))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AnonymizationAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AnonymizationBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AnonymizationEntity))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CategoryType), TypeInfoPropertyName = "CategoryType2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SubCategoryType), TypeInfoPropertyName = "SubCategoryType2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AnonymizationItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AskLLMRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(double))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AskYodaProjectUpdate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AskYourDataProjectRequest))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.DbProviderEnum), TypeInfoPropertyName = "DbProviderEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.EmbeddingsProviderEnum), TypeInfoPropertyName = "EmbeddingsProviderEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AssetCreate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AssetCreateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AssetList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AssetListRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AssetUpdate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AssetUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncJobList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.StateEnum), TypeInfoPropertyName = "StateEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncOcrRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncVideoAnalysisRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AutomlClassificationListProjectsResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AutomlClassificationProject))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AutomlClassificationPredictRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AutomlClassificationTrainRequest))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AutomlClassificationUploadDataRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TypeOfDataEnum), TypeInfoPropertyName = "TypeOfDataEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BadRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.NestedBadRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BatchLaunchFailedRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BatchLaunchResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BatchList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BatchRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BatchResponseRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BlankEnum), TypeInfoPropertyName = "BlankEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BoundingBox2))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.BoundixBoxOCRTable))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Cell))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatAvailableToolsRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatCompletionChoice))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatMessage))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatCompletionUsage))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.UsageTokensDetails))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatRole), TypeInfoPropertyName = "ChatRole2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatMessageContent))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatMessageDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatMessageRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatToolCallsRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ChatToolResultRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ContentNSFW))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Conversation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ConversationDetail))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Message))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ConversationDetailRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ConversationRequest))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CostMonitoringResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TokenData))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Country))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomDocumentParsingAsyncBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomDocumentParsingAsyncItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomDocumentParsingAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomTokenUpdate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TokenTypeEnum), TypeInfoPropertyName = "TokenTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomTokensCreate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomTokensCreateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CustomTokensList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.DetailPerFrame))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PredictionB20Enum), TypeInfoPropertyName = "PredictionB20Enum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.DetailTypeEnum), TypeInfoPropertyName = "DetailTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Details))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.Dictionary))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.DocumentTypeEnum), TypeInfoPropertyName = "DocumentTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.EmbeddingDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.EmotionItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Entity))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.EntitySentimentEnum), TypeInfoPropertyName = "EntitySentimentEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Error))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.NestedError))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExecutionContentCreate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExecutionContentCreateStatusEnum), TypeInfoPropertyName = "ExecutionContentCreateStatusEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExecutionExampleSuccessCreate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExecutionFakeCreateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExecutionList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExplicitItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ExtractedTopic))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceAccessories))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceAttributes))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoFacePoses))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceCompareBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceEmotions))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceFacialHair))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceFeatures))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceHair))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceHairColor))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceLandmarks))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FacePoses))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceQuality))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceMakeup))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceOcclusions))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FaceMatch))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FallbackTypeEnum), TypeInfoPropertyName = "FallbackTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Feature))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FieldError))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinalStatusEnum), TypeInfoPropertyName = "FinalStatusEnum2")] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialBankInformation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialBarcode))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialCustomerInformation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialDocumentInformation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialDocumentMetadata))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialLineItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialLocalInformation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialMerchantInformation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialParserObjectDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FinancialPaymentInformation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.GeneralSentimentEnum), TypeInfoPropertyName = "GeneralSentimentEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.GenerateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ReasoningEffortEnum), TypeInfoPropertyName = "ReasoningEffortEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.MetadataRequest))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.Dictionary))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ServiceTierEnum), TypeInfoPropertyName = "ServiceTierEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ThinkingRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.GeneratedImageDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.GenerationAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageaiDetectionAiDetectionDataClassPredictionEnum), TypeInfoPropertyName = "ImageaiDetectionAiDetectionDataClassPredictionEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.InfosIdentityParserDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ItemIdentityParserDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.InfosKeywordExtractionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.InfosLanguageDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.InfosNamedEntityRecognitionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ItemBankCheckParsingDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.MicrModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ItemDataExtraction))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LandmarkItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LandmarkVertice))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LandmarkLocation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LandmarkLatLng))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LandmarksVideo))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LaunchAsyncJobResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Line))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Word))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ListAsyncJobResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ListChunkRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LogoBoundingPoly))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LogoVertice))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LogoItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LogoTrack))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoLogo))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LowerCloth))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.MessageRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.NotFoundResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ObjectFrame))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoObjectBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ObjectItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ObjectTrack))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrTablesAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OptionEnum), TypeInfoPropertyName = "OptionEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Page))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PaginatedBatchResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PaginatedExecutionListList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PaginatedPromptCreateList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptCreate))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PaginatedPromptHistoryList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptHistory))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedAskYodaProjectUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedAssetUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedConversationDetailRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedCustomTokenUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedPromptHistoryRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedPromptUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedResourceUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TypeDe8Enum), TypeInfoPropertyName = "TypeDe8Enum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PatchedWebhookParametersRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PersonAttributes))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.UpperCloth))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PersonLandmarks))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PersonTracking))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoPersonPoses))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoPersonQuality))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoTrackingBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PlagiaDetectionCandidate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PlagiaDetectionItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PriceUnitTypeEnum), TypeInfoPropertyName = "PriceUnitTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PricingSerialzier))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OneOf), TypeInfoPropertyName = "OneOfDetailTypeEnumBlankEnumNullEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptCallRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptCreateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptHistoryRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptUpdate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.PromptUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Provider))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ProviderSubfeature))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Subfeature))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ProviderSubfeatureLanguage))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.QuestionAnswerAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.RepresentationEnum), TypeInfoPropertyName = "RepresentationEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResourceCreate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResourceCreateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResourceList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResourceUpdate))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResourceUpdateRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeEducation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeEducationEntry))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeLocation))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeExtractedData))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumePersonalInfo))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeWorkExp))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeLang))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeSkill))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumePersonalName))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ResumeWorkExpEntry))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Row))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SegmentSentimentAnalysisDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SentimentEbfEnum), TypeInfoPropertyName = "SentimentEbfEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ShotFrame))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SpeechDiarization))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SpeechDiarizationEntry))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SpeechToTextAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SpellCheckItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.SuggestionItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Status549Enum), TypeInfoPropertyName = "Status549Enum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Table))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextModerationItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextToSpeechAsyncRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OneOf), TypeInfoPropertyName = "OneOfOptionEnumBlankEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ThinkingTypeEnum), TypeInfoPropertyName = "ThinkingTypeEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.Dictionary>))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.Dictionary))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ToolCall))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ToolChoiceEnum), TypeInfoPropertyName = "ToolChoiceEnum2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoFace))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoLabel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoLabelTimeStamp))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoLabelBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoLogoBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoText))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoTextFrames))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoTextBoundingBox))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoTrackingPerson))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.WebhookParameters))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.WebhookParametersRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.Workflow))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaAskLlmResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaCreateProjectResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaDeleteResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaInfoResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaListChunksIdsResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaQueryResponse))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaQueryResponseItem))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.YodaQueryResponsePayload))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncaudiospeechToTextAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiospeechToTextAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncaudiotextToSpeechAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiotextToSpeechAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncocranonymizationAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcranonymizationAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncocrcustomDocumentParsingAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrcustomDocumentParsingAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncocrocrAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncocrocrTablesAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrTablesAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideodeepfakeDetectionAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideodeepfakeDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideoexplicitContentDetectionAsyncResponseModel))] 
+ [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoexplicitContentDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideofaceDetectionAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideofaceDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideogenerationAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideogenerationAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideolabelDetectionAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideolabelDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideologoDetectionAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideologoDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideoobjectTrackingAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoobjectTrackingAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideopersonTrackingAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideopersonTrackingAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideoquestionAnswerAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoquestionAnswerAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideoshotChangeDetectionAsyncResponseModel))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoshotChangeDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AsyncvideotextDetectionAsyncResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideotextDetectionAsyncModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiotextToSpeechResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiotextToSpeechTextToSpeechDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiotextToSpeechTextToSpeechRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.AudiotextToSpeechAsyncTextToSpeechAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.CreditsSerializer))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageaiDetectionAiDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageaiDetectionAiDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageaiDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageanonymizationAnonymizationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageanonymizationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageanonymizationimagelandmarkDetectionimageexplicitContentimagedeepfakeDetectionImageRequest))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageautomlClassificationAutomlClassificationCreateProjectDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageautomlClassificationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageautomlClassificationcreateProjectAutomlClassificationCreateProjectRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageautomlClassificationdeleteProjectAutomlClassificationDeleteRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagebackgroundRemovalBackgroundRemovalRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagebackgroundRemovalResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagedeepfakeDetectionDeepfakeDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagedeepfakeDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageembeddingsEmbeddingsDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageembeddingsEmbeddingsRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageembeddingsResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageexplicitContentExplicitContentDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageexplicitContentResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceCompareFaceCompareDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceCompareFaceCompareRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceCompareResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceDetectionFaceDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceDetectionFaceDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceRecognitionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceRecognitionaddFaceFaceRecognitionAddFaceRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceRecognitiondeleteFaceFaceRecognitionDeleteFaceRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagefaceRecognitionrecognizeFaceRecognitionDetectFaceRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagegenerationGenerationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagegenerationGenerationRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagegenerationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagelandmarkDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagelogoDetectionLogoDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagelogoDetectionLogoDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagelogoDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageobjectDetectionObjectDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageobjectDetectionObjectDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImageobjectDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagequestionAnswerQuestionAnswerRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagequestionAnswerResponseModel))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagesearchResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagesearchSearchDeleteImageDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagesearchdeleteImageDeleteImageRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagesearchlaunchSimilaritySearchImageRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.ImagesearchuploadImageUploadImageRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LlmchatChatDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.DateTimeOffset))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LlmchatResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.LlmchatllmchatChatRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.MultimodalchatChatDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.MultimodalchatChatRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.MultimodalchatResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrbankCheckParsingBankCheckParsingRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrbankCheckParsingResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrcustomDocumentParsingAsyncCustomDocumentParsingAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrdataExtractionDataExtractionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrdataExtractionDataExtractionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrdataExtractionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrfinancialParserFinancialParserDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrfinancialParserFinancialParserRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrfinancialParserResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcridentityParserIdentityParserDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcridentityParserIdentityParserRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcridentityParserResponseModel))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrOcrDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrOcrRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrAsyncOcrAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrresumeParserResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrresumeParserResumeParserDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.OcrresumeParserResumeParserRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextaiDetectionAiDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextaiDetectionAiDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextaiDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextanonymizationAnonymizationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextanonymizationResponseModel))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextchatChatDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextchatChatRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextchatChatStreamRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextchatResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextcodeGenerationCodeGenerationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextcodeGenerationCodeGenerationRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextcodeGenerationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextembeddingsEmbeddingsDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextembeddingsEmbeddingsRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextembeddingsResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextemotionDetectionEmotionDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextemotionDetectionEmotionDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextemotionDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextentitySentimentEntitySentimentDataClass))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextentitySentimentEntitySentimentRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextentitySentimentResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextkeywordExtractionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextmoderationModerationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextmoderationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextnamedEntityRecognitionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextplagiaDetectionPlagiaDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextplagiaDetectionPlagiaDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextplagiaDetectionResponseModel))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextpromptOptimizationPromptOptimizationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextpromptOptimizationPromptOptimizationRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextpromptOptimizationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextsentimentAnalysisResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextspellCheckResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextspellCheckSpellCheckDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextspellCheckSpellCheckRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextsummarizeResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextsummarizeSummarizeDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TextsummarizeSummarizeRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TexttopicExtractionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TexttopicExtractionTopicExtractionDataClass))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TexttopicExtractiontextanonymizationtextmoderationtextnamedEntityRecognitiontextkeywordExtractiontextsyntaxAnalysistextsentimentAnalysisTextAnalysisRequest), TypeInfoPropertyName = "TexttopicExtractiontextanonymizationtextmoderationtextnamedEntityRecognitiontextkeywordExtractiontextsy_2d9618c64ac15db3")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationautomaticTranslationAutomaticTranslationRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationautomaticTranslationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationdocumentTranslationDocumentTranslationDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationdocumentTranslationDocumentTranslationRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationdocumentTranslationResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationlanguageDetectionLanguageDetectionRequest))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.TranslationlanguageDetectionResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideodeepfakeDetectionAsyncDeepfakeDetectionAsyncDataClass))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoexplicitContentDetectionAsyncExplicitContentDetectionAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideofaceDetectionAsyncFaceDetectionAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideolabelDetectionAsyncLabelDetectionAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoobjectTrackingAsyncObjectTrackingAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideopersonTrackingAsyncPersonTrackingAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoquestionAnswerQuestionAnswerDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoquestionAnswerQuestionAnswerRequest))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoquestionAnswerResponseModel))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoquestionAnswerAsyncQuestionAnswerAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideoshotChangeDetectionAsyncShotChangeDetectionAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::EdenAI.FeatureBatchRetrieveStatus), TypeInfoPropertyName = "FeatureBatchRetrieveStatus2")] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.IList))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + 
[global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + [global::System.Text.Json.Serialization.JsonSerializable(typeof(global::System.Collections.Generic.List))] + public sealed partial class SourceGenerationContext : global::System.Text.Json.Serialization.JsonSerializerContext + { + } +} \ No newline at end of file diff --git a/src/libs/EdenAI/Generated/EdenAI.JsonSerializerContextTypes.g.cs b/src/libs/EdenAI/Generated/EdenAI.JsonSerializerContextTypes.g.cs new file mode 100644 index 0000000..24c5767 --- /dev/null +++ b/src/libs/EdenAI/Generated/EdenAI.JsonSerializerContextTypes.g.cs @@ -0,0 +1,2650 @@ + +#nullable enable + +#pragma warning disable CS0618 // Type or member is obsolete + +namespace EdenAI +{ + /// + /// + /// + public sealed partial class JsonSerializerContextTypes + { + /// + /// + /// + public global::System.Collections.Generic.Dictionary? StringStringDictionary { get; set; } + + /// + /// + /// + public global::System.Collections.Generic.Dictionary? StringObjectDictionary { get; set; } + + /// + /// + /// + public global::System.Text.Json.JsonElement? JsonElement { get; set; } + + /// + /// + /// + public global::EdenAI.AIProject? Type0 { get; set; } + /// + /// + /// + public global::System.Guid? Type1 { get; set; } + /// + /// + /// + public string? Type2 { get; set; } + /// + /// + /// + public global::EdenAI.ProjectTypeEnum? Type3 { get; set; } + /// + /// + /// + public global::System.DateTime? 
Type4 { get; set; } + /// + /// + /// + public global::EdenAI.AddFileRequest? Type5 { get; set; } + /// + /// + /// + public global::EdenAI.DataTypeEnum? Type6 { get; set; } + /// + /// + /// + public byte[]? Type7 { get; set; } + /// + /// + /// + public global::EdenAI.AddImageRequest? Type8 { get; set; } + /// + /// + /// + public global::EdenAI.AddTextRequest? Type9 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type10 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type11 { get; set; } + /// + /// + /// + public object? Type12 { get; set; } + /// + /// + /// + public global::EdenAI.AddUrlRequest? Type13 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type14 { get; set; } + /// + /// + /// + public bool? Type15 { get; set; } + /// + /// + /// + public global::EdenAI.AddVideoRequest? Type16 { get; set; } + /// + /// + /// + public global::EdenAI.AiDetectionItem? Type17 { get; set; } + /// + /// + /// + public int? Type18 { get; set; } + /// + /// + /// + public global::EdenAI.AiProductFile? Type19 { get; set; } + /// + /// + /// + public global::EdenAI.OneOf? Type20 { get; set; } + /// + /// + /// + public global::EdenAI.Status889Enum? Type21 { get; set; } + /// + /// + /// + public global::EdenAI.NullEnum? Type22 { get; set; } + /// + /// + /// + public global::EdenAI.AnonymizationAsyncRequest? Type23 { get; set; } + /// + /// + /// + public global::EdenAI.AnonymizationBoundingBox? Type24 { get; set; } + /// + /// + /// + public global::EdenAI.AnonymizationEntity? Type25 { get; set; } + /// + /// + /// + public global::EdenAI.CategoryType? Type26 { get; set; } + /// + /// + /// + public global::EdenAI.SubCategoryType? Type27 { get; set; } + /// + /// + /// + public global::EdenAI.AnonymizationItem? Type28 { get; set; } + /// + /// + /// + public global::EdenAI.AskLLMRequest? Type29 { get; set; } + /// + /// + /// + public double? 
Type30 { get; set; } + /// + /// + /// + public global::EdenAI.AskYodaProjectUpdate? Type31 { get; set; } + /// + /// + /// + public global::EdenAI.AskYourDataProjectRequest? Type32 { get; set; } + /// + /// + /// + public global::EdenAI.DbProviderEnum? Type33 { get; set; } + /// + /// + /// + public global::EdenAI.EmbeddingsProviderEnum? Type34 { get; set; } + /// + /// + /// + public global::EdenAI.AssetCreate? Type35 { get; set; } + /// + /// + /// + public global::EdenAI.AssetCreateRequest? Type36 { get; set; } + /// + /// + /// + public global::EdenAI.AssetList? Type37 { get; set; } + /// + /// + /// + public global::EdenAI.AssetListRequest? Type38 { get; set; } + /// + /// + /// + public global::EdenAI.AssetUpdate? Type39 { get; set; } + /// + /// + /// + public global::EdenAI.AssetUpdateRequest? Type40 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncJobList? Type41 { get; set; } + /// + /// + /// + public global::EdenAI.StateEnum? Type42 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncOcrRequest? Type43 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncVideoAnalysisRequest? Type44 { get; set; } + /// + /// + /// + public global::EdenAI.AutomlClassificationListProjectsResponse? Type45 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type46 { get; set; } + /// + /// + /// + public global::EdenAI.AutomlClassificationProject? Type47 { get; set; } + /// + /// + /// + public global::EdenAI.AutomlClassificationPredictRequest? Type48 { get; set; } + /// + /// + /// + public global::EdenAI.AutomlClassificationTrainRequest? Type49 { get; set; } + /// + /// + /// + public global::EdenAI.AutomlClassificationUploadDataRequest? Type50 { get; set; } + /// + /// + /// + public global::EdenAI.TypeOfDataEnum? Type51 { get; set; } + /// + /// + /// + public global::EdenAI.BadRequest? Type52 { get; set; } + /// + /// + /// + public global::EdenAI.NestedBadRequest? 
Type53 { get; set; } + /// + /// + /// + public global::EdenAI.BatchLaunchFailedRequest? Type54 { get; set; } + /// + /// + /// + public global::EdenAI.BatchLaunchResponse? Type55 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type56 { get; set; } + /// + /// + /// + public global::EdenAI.BatchList? Type57 { get; set; } + /// + /// + /// + public global::EdenAI.BatchRequest? Type58 { get; set; } + /// + /// + /// + public global::EdenAI.BatchResponseRequest? Type59 { get; set; } + /// + /// + /// + public global::EdenAI.BlankEnum? Type60 { get; set; } + /// + /// + /// + public global::EdenAI.BoundingBox? Type61 { get; set; } + /// + /// + /// + public global::EdenAI.BoundingBox2? Type62 { get; set; } + /// + /// + /// + public global::EdenAI.BoundixBoxOCRTable? Type63 { get; set; } + /// + /// + /// + public global::EdenAI.Cell? Type64 { get; set; } + /// + /// + /// + public global::EdenAI.ChatAvailableToolsRequest? Type65 { get; set; } + /// + /// + /// + public global::EdenAI.ChatCompletionChoice? Type66 { get; set; } + /// + /// + /// + public global::EdenAI.ChatMessage? Type67 { get; set; } + /// + /// + /// + public global::EdenAI.ChatCompletionUsage? Type68 { get; set; } + /// + /// + /// + public global::EdenAI.UsageTokensDetails? Type69 { get; set; } + /// + /// + /// + public global::EdenAI.ChatRole? Type70 { get; set; } + /// + /// + /// + public global::EdenAI.ChatMessageContent? Type71 { get; set; } + /// + /// + /// + public global::EdenAI.ChatMessageDataClass? Type72 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type73 { get; set; } + /// + /// + /// + public global::EdenAI.ChatMessageRequest? Type74 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type75 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type76 { get; set; } + /// + /// + /// + public global::EdenAI.ChatToolCallsRequest? 
Type77 { get; set; } + /// + /// + /// + public global::EdenAI.ChatToolResultRequest? Type78 { get; set; } + /// + /// + /// + public global::EdenAI.ContentNSFW? Type79 { get; set; } + /// + /// + /// + public global::EdenAI.Conversation? Type80 { get; set; } + /// + /// + /// + public global::EdenAI.ConversationDetail? Type81 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type82 { get; set; } + /// + /// + /// + public global::EdenAI.Message? Type83 { get; set; } + /// + /// + /// + public global::EdenAI.ConversationDetailRequest? Type84 { get; set; } + /// + /// + /// + public global::EdenAI.ConversationRequest? Type85 { get; set; } + /// + /// + /// + public global::EdenAI.CostMonitoringResponse? Type86 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type87 { get; set; } + /// + /// + /// + public global::EdenAI.TokenData? Type88 { get; set; } + /// + /// + /// + public global::EdenAI.Country? Type89 { get; set; } + /// + /// + /// + public global::EdenAI.CustomDocumentParsingAsyncBoundingBox? Type90 { get; set; } + /// + /// + /// + public global::EdenAI.CustomDocumentParsingAsyncItem? Type91 { get; set; } + /// + /// + /// + public global::EdenAI.CustomDocumentParsingAsyncRequest? Type92 { get; set; } + /// + /// + /// + public global::EdenAI.CustomTokenUpdate? Type93 { get; set; } + /// + /// + /// + public global::EdenAI.TokenTypeEnum? Type94 { get; set; } + /// + /// + /// + public global::EdenAI.CustomTokensCreate? Type95 { get; set; } + /// + /// + /// + public global::EdenAI.CustomTokensCreateRequest? Type96 { get; set; } + /// + /// + /// + public global::EdenAI.CustomTokensList? Type97 { get; set; } + /// + /// + /// + public global::EdenAI.DetailPerFrame? Type98 { get; set; } + /// + /// + /// + public global::EdenAI.PredictionB20Enum? Type99 { get; set; } + /// + /// + /// + public global::EdenAI.DetailTypeEnum? 
Type100 { get; set; } + /// + /// + /// + public global::EdenAI.Details? Type101 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.Dictionary? Type102 { get; set; } + /// + /// + /// + public global::EdenAI.DocumentTypeEnum? Type103 { get; set; } + /// + /// + /// + public global::EdenAI.EmbeddingDataClass? Type104 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type105 { get; set; } + /// + /// + /// + public global::EdenAI.EmotionItem? Type106 { get; set; } + /// + /// + /// + public global::EdenAI.Entity? Type107 { get; set; } + /// + /// + /// + public global::EdenAI.EntitySentimentEnum? Type108 { get; set; } + /// + /// + /// + public global::EdenAI.Error? Type109 { get; set; } + /// + /// + /// + public global::EdenAI.NestedError? Type110 { get; set; } + /// + /// + /// + public global::EdenAI.ExecutionContentCreate? Type111 { get; set; } + /// + /// + /// + public global::EdenAI.ExecutionContentCreateStatusEnum? Type112 { get; set; } + /// + /// + /// + public global::EdenAI.ExecutionExampleSuccessCreate? Type113 { get; set; } + /// + /// + /// + public global::EdenAI.ExecutionFakeCreateRequest? Type114 { get; set; } + /// + /// + /// + public global::EdenAI.ExecutionList? Type115 { get; set; } + /// + /// + /// + public global::EdenAI.ExplicitItem? Type116 { get; set; } + /// + /// + /// + public global::EdenAI.ExtractedTopic? Type117 { get; set; } + /// + /// + /// + public global::EdenAI.FaceAccessories? Type118 { get; set; } + /// + /// + /// + public global::EdenAI.FaceAttributes? Type119 { get; set; } + /// + /// + /// + public global::EdenAI.VideoFacePoses? Type120 { get; set; } + /// + /// + /// + public global::EdenAI.FaceBoundingBox? Type121 { get; set; } + /// + /// + /// + public global::EdenAI.FaceCompareBoundingBox? Type122 { get; set; } + /// + /// + /// + public global::EdenAI.FaceEmotions? Type123 { get; set; } + /// + /// + /// + public global::EdenAI.FaceFacialHair? 
Type124 { get; set; } + /// + /// + /// + public global::EdenAI.FaceFeatures? Type125 { get; set; } + /// + /// + /// + public global::EdenAI.FaceHair? Type126 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type127 { get; set; } + /// + /// + /// + public global::EdenAI.FaceHairColor? Type128 { get; set; } + /// + /// + /// + public global::EdenAI.FaceItem? Type129 { get; set; } + /// + /// + /// + public global::EdenAI.FaceLandmarks? Type130 { get; set; } + /// + /// + /// + public global::EdenAI.FacePoses? Type131 { get; set; } + /// + /// + /// + public global::EdenAI.FaceQuality? Type132 { get; set; } + /// + /// + /// + public global::EdenAI.FaceMakeup? Type133 { get; set; } + /// + /// + /// + public global::EdenAI.FaceOcclusions? Type134 { get; set; } + /// + /// + /// + public global::EdenAI.FaceMatch? Type135 { get; set; } + /// + /// + /// + public global::EdenAI.FallbackTypeEnum? Type136 { get; set; } + /// + /// + /// + public global::EdenAI.Feature? Type137 { get; set; } + /// + /// + /// + public global::EdenAI.FieldError? Type138 { get; set; } + /// + /// + /// + public global::EdenAI.FinalStatusEnum? Type139 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialBankInformation? Type140 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialBarcode? Type141 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialCustomerInformation? Type142 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialDocumentInformation? Type143 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type144 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialDocumentMetadata? Type145 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialLineItem? Type146 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialLocalInformation? Type147 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialMerchantInformation? 
Type148 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialParserObjectDataClass? Type149 { get; set; } + /// + /// + /// + public global::EdenAI.FinancialPaymentInformation? Type150 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type151 { get; set; } + /// + /// + /// + public global::EdenAI.GeneralSentimentEnum? Type152 { get; set; } + /// + /// + /// + public global::EdenAI.GenerateRequest? Type153 { get; set; } + /// + /// + /// + public global::EdenAI.ReasoningEffortEnum? Type154 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type155 { get; set; } + /// + /// + /// + public global::EdenAI.MetadataRequest? Type156 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.Dictionary? Type157 { get; set; } + /// + /// + /// + public global::EdenAI.ServiceTierEnum? Type158 { get; set; } + /// + /// + /// + public global::EdenAI.ThinkingRequest? Type159 { get; set; } + /// + /// + /// + public global::EdenAI.GeneratedImageDataClass? Type160 { get; set; } + /// + /// + /// + public global::EdenAI.GenerationAsyncRequest? Type161 { get; set; } + /// + /// + /// + public global::EdenAI.ImageaiDetectionAiDetectionDataClassPredictionEnum? Type162 { get; set; } + /// + /// + /// + public global::EdenAI.InfosIdentityParserDataClass? Type163 { get; set; } + /// + /// + /// + public global::EdenAI.ItemIdentityParserDataClass? Type164 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type165 { get; set; } + /// + /// + /// + public global::EdenAI.InfosKeywordExtractionDataClass? Type166 { get; set; } + /// + /// + /// + public global::EdenAI.InfosLanguageDetectionDataClass? Type167 { get; set; } + /// + /// + /// + public global::EdenAI.InfosNamedEntityRecognitionDataClass? Type168 { get; set; } + /// + /// + /// + public global::EdenAI.ItemBankCheckParsingDataClass? Type169 { get; set; } + /// + /// + /// + public global::EdenAI.MicrModel? 
Type170 { get; set; } + /// + /// + /// + public global::EdenAI.ItemDataExtraction? Type171 { get; set; } + /// + /// + /// + public global::EdenAI.LandmarkItem? Type172 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type173 { get; set; } + /// + /// + /// + public global::EdenAI.LandmarkVertice? Type174 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type175 { get; set; } + /// + /// + /// + public global::EdenAI.LandmarkLocation? Type176 { get; set; } + /// + /// + /// + public global::EdenAI.LandmarkLatLng? Type177 { get; set; } + /// + /// + /// + public global::EdenAI.LandmarksVideo? Type178 { get; set; } + /// + /// + /// + public global::EdenAI.LaunchAsyncJobResponse? Type179 { get; set; } + /// + /// + /// + public global::EdenAI.Line? Type180 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type181 { get; set; } + /// + /// + /// + public global::EdenAI.Word? Type182 { get; set; } + /// + /// + /// + public global::EdenAI.ListAsyncJobResponse? Type183 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type184 { get; set; } + /// + /// + /// + public global::EdenAI.ListChunkRequest? Type185 { get; set; } + /// + /// + /// + public global::EdenAI.LogoBoundingPoly? Type186 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type187 { get; set; } + /// + /// + /// + public global::EdenAI.LogoVertice? Type188 { get; set; } + /// + /// + /// + public global::EdenAI.LogoItem? Type189 { get; set; } + /// + /// + /// + public global::EdenAI.LogoTrack? Type190 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type191 { get; set; } + /// + /// + /// + public global::EdenAI.VideoLogo? Type192 { get; set; } + /// + /// + /// + public global::EdenAI.LowerCloth? Type193 { get; set; } + /// + /// + /// + public global::EdenAI.MessageRequest? 
Type194 { get; set; } + /// + /// + /// + public global::EdenAI.NotFoundResponse? Type195 { get; set; } + /// + /// + /// + public global::EdenAI.ObjectFrame? Type196 { get; set; } + /// + /// + /// + public global::EdenAI.VideoObjectBoundingBox? Type197 { get; set; } + /// + /// + /// + public global::EdenAI.ObjectItem? Type198 { get; set; } + /// + /// + /// + public global::EdenAI.ObjectTrack? Type199 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type200 { get; set; } + /// + /// + /// + public global::EdenAI.OcrTablesAsyncRequest? Type201 { get; set; } + /// + /// + /// + public global::EdenAI.OptionEnum? Type202 { get; set; } + /// + /// + /// + public global::EdenAI.Page? Type203 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type204 { get; set; } + /// + /// + /// + public global::EdenAI.PaginatedBatchResponse? Type205 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type206 { get; set; } + /// + /// + /// + public global::EdenAI.PaginatedExecutionListList? Type207 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type208 { get; set; } + /// + /// + /// + public global::EdenAI.PaginatedPromptCreateList? Type209 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type210 { get; set; } + /// + /// + /// + public global::EdenAI.PromptCreate? Type211 { get; set; } + /// + /// + /// + public global::EdenAI.PaginatedPromptHistoryList? Type212 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type213 { get; set; } + /// + /// + /// + public global::EdenAI.PromptHistory? Type214 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedAskYodaProjectUpdateRequest? Type215 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedAssetUpdateRequest? Type216 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedConversationDetailRequest? 
Type217 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedCustomTokenUpdateRequest? Type218 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedPromptHistoryRequest? Type219 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedPromptUpdateRequest? Type220 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedResourceUpdateRequest? Type221 { get; set; } + /// + /// + /// + public global::EdenAI.TypeDe8Enum? Type222 { get; set; } + /// + /// + /// + public global::EdenAI.PatchedWebhookParametersRequest? Type223 { get; set; } + /// + /// + /// + public global::EdenAI.PersonAttributes? Type224 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type225 { get; set; } + /// + /// + /// + public global::EdenAI.UpperCloth? Type226 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type227 { get; set; } + /// + /// + /// + public global::EdenAI.PersonLandmarks? Type228 { get; set; } + /// + /// + /// + public global::EdenAI.PersonTracking? Type229 { get; set; } + /// + /// + /// + public global::EdenAI.VideoPersonPoses? Type230 { get; set; } + /// + /// + /// + public global::EdenAI.VideoPersonQuality? Type231 { get; set; } + /// + /// + /// + public global::EdenAI.VideoTrackingBoundingBox? Type232 { get; set; } + /// + /// + /// + public global::EdenAI.PlagiaDetectionCandidate? Type233 { get; set; } + /// + /// + /// + public global::EdenAI.PlagiaDetectionItem? Type234 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type235 { get; set; } + /// + /// + /// + public global::EdenAI.PriceUnitTypeEnum? Type236 { get; set; } + /// + /// + /// + public global::EdenAI.PricingSerialzier? Type237 { get; set; } + /// + /// + /// + public global::EdenAI.OneOf? Type238 { get; set; } + /// + /// + /// + public global::EdenAI.PromptCallRequest? Type239 { get; set; } + /// + /// + /// + public global::EdenAI.PromptCreateRequest? 
Type240 { get; set; } + /// + /// + /// + public global::EdenAI.PromptDataClass? Type241 { get; set; } + /// + /// + /// + public global::EdenAI.PromptHistoryRequest? Type242 { get; set; } + /// + /// + /// + public global::EdenAI.PromptUpdate? Type243 { get; set; } + /// + /// + /// + public global::EdenAI.PromptUpdateRequest? Type244 { get; set; } + /// + /// + /// + public global::EdenAI.Provider? Type245 { get; set; } + /// + /// + /// + public global::EdenAI.ProviderSubfeature? Type246 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type247 { get; set; } + /// + /// + /// + public global::EdenAI.Subfeature? Type248 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type249 { get; set; } + /// + /// + /// + public global::EdenAI.ProviderSubfeatureLanguage? Type250 { get; set; } + /// + /// + /// + public global::EdenAI.QuestionAnswerAsyncRequest? Type251 { get; set; } + /// + /// + /// + public global::EdenAI.RepresentationEnum? Type252 { get; set; } + /// + /// + /// + public global::EdenAI.ResourceCreate? Type253 { get; set; } + /// + /// + /// + public global::EdenAI.ResourceCreateRequest? Type254 { get; set; } + /// + /// + /// + public global::EdenAI.ResourceList? Type255 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type256 { get; set; } + /// + /// + /// + public global::EdenAI.ResourceUpdate? Type257 { get; set; } + /// + /// + /// + public global::EdenAI.ResourceUpdateRequest? Type258 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeEducation? Type259 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type260 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeEducationEntry? Type261 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeLocation? Type262 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeExtractedData? 
Type263 { get; set; } + /// + /// + /// + public global::EdenAI.ResumePersonalInfo? Type264 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeWorkExp? Type265 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type266 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeLang? Type267 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type268 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeSkill? Type269 { get; set; } + /// + /// + /// + public global::EdenAI.ResumePersonalName? Type270 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type271 { get; set; } + /// + /// + /// + public global::EdenAI.ResumeWorkExpEntry? Type272 { get; set; } + /// + /// + /// + public global::EdenAI.Row? Type273 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type274 { get; set; } + /// + /// + /// + public global::EdenAI.SegmentSentimentAnalysisDataClass? Type275 { get; set; } + /// + /// + /// + public global::EdenAI.SentimentEbfEnum? Type276 { get; set; } + /// + /// + /// + public global::EdenAI.ShotFrame? Type277 { get; set; } + /// + /// + /// + public global::EdenAI.SpeechDiarization? Type278 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type279 { get; set; } + /// + /// + /// + public global::EdenAI.SpeechDiarizationEntry? Type280 { get; set; } + /// + /// + /// + public global::EdenAI.SpeechToTextAsyncRequest? Type281 { get; set; } + /// + /// + /// + public global::EdenAI.SpellCheckItem? Type282 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type283 { get; set; } + /// + /// + /// + public global::EdenAI.SuggestionItem? Type284 { get; set; } + /// + /// + /// + public global::EdenAI.Status549Enum? Type285 { get; set; } + /// + /// + /// + public global::EdenAI.Table? 
Type286 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type287 { get; set; } + /// + /// + /// + public global::EdenAI.TextModerationItem? Type288 { get; set; } + /// + /// + /// + public global::EdenAI.TextToSpeechAsyncRequest? Type289 { get; set; } + /// + /// + /// + public global::EdenAI.OneOf? Type290 { get; set; } + /// + /// + /// + public global::EdenAI.ThinkingTypeEnum? Type291 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.Dictionary>? Type292 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.Dictionary? Type293 { get; set; } + /// + /// + /// + public global::EdenAI.ToolCall? Type294 { get; set; } + /// + /// + /// + public global::EdenAI.ToolChoiceEnum? Type295 { get; set; } + /// + /// + /// + public global::EdenAI.VideoBoundingBox? Type296 { get; set; } + /// + /// + /// + public global::EdenAI.VideoFace? Type297 { get; set; } + /// + /// + /// + public global::EdenAI.VideoLabel? Type298 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type299 { get; set; } + /// + /// + /// + public global::EdenAI.VideoLabelTimeStamp? Type300 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type301 { get; set; } + /// + /// + /// + public global::EdenAI.VideoLabelBoundingBox? Type302 { get; set; } + /// + /// + /// + public global::EdenAI.VideoLogoBoundingBox? Type303 { get; set; } + /// + /// + /// + public global::EdenAI.VideoText? Type304 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type305 { get; set; } + /// + /// + /// + public global::EdenAI.VideoTextFrames? Type306 { get; set; } + /// + /// + /// + public global::EdenAI.VideoTextBoundingBox? Type307 { get; set; } + /// + /// + /// + public global::EdenAI.VideoTrackingPerson? Type308 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? 
Type309 { get; set; } + /// + /// + /// + public global::EdenAI.WebhookParameters? Type310 { get; set; } + /// + /// + /// + public global::EdenAI.WebhookParametersRequest? Type311 { get; set; } + /// + /// + /// + public global::EdenAI.Workflow? Type312 { get; set; } + /// + /// + /// + public global::EdenAI.YodaAskLlmResponse? Type313 { get; set; } + /// + /// + /// + public global::EdenAI.YodaCreateProjectResponse? Type314 { get; set; } + /// + /// + /// + public global::EdenAI.YodaDeleteResponse? Type315 { get; set; } + /// + /// + /// + public global::EdenAI.YodaInfoResponse? Type316 { get; set; } + /// + /// + /// + public global::EdenAI.YodaListChunksIdsResponse? Type317 { get; set; } + /// + /// + /// + public global::EdenAI.YodaQueryResponse? Type318 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type319 { get; set; } + /// + /// + /// + public global::EdenAI.YodaQueryResponseItem? Type320 { get; set; } + /// + /// + /// + public global::EdenAI.YodaQueryResponsePayload? Type321 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncaudiospeechToTextAsyncResponseModel? Type322 { get; set; } + /// + /// + /// + public global::EdenAI.AudiospeechToTextAsyncModel? Type323 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncaudiotextToSpeechAsyncResponseModel? Type324 { get; set; } + /// + /// + /// + public global::EdenAI.AudiotextToSpeechAsyncModel? Type325 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncocranonymizationAsyncResponseModel? Type326 { get; set; } + /// + /// + /// + public global::EdenAI.OcranonymizationAsyncModel? Type327 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncocrcustomDocumentParsingAsyncResponseModel? Type328 { get; set; } + /// + /// + /// + public global::EdenAI.OcrcustomDocumentParsingAsyncModel? Type329 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncocrocrAsyncResponseModel? 
Type330 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrAsyncModel? Type331 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncocrocrTablesAsyncResponseModel? Type332 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrTablesAsyncModel? Type333 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideodeepfakeDetectionAsyncResponseModel? Type334 { get; set; } + /// + /// + /// + public global::EdenAI.VideodeepfakeDetectionAsyncModel? Type335 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideoexplicitContentDetectionAsyncResponseModel? Type336 { get; set; } + /// + /// + /// + public global::EdenAI.VideoexplicitContentDetectionAsyncModel? Type337 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideofaceDetectionAsyncResponseModel? Type338 { get; set; } + /// + /// + /// + public global::EdenAI.VideofaceDetectionAsyncModel? Type339 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideogenerationAsyncResponseModel? Type340 { get; set; } + /// + /// + /// + public global::EdenAI.VideogenerationAsyncModel? Type341 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideolabelDetectionAsyncResponseModel? Type342 { get; set; } + /// + /// + /// + public global::EdenAI.VideolabelDetectionAsyncModel? Type343 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideologoDetectionAsyncResponseModel? Type344 { get; set; } + /// + /// + /// + public global::EdenAI.VideologoDetectionAsyncModel? Type345 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideoobjectTrackingAsyncResponseModel? Type346 { get; set; } + /// + /// + /// + public global::EdenAI.VideoobjectTrackingAsyncModel? Type347 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideopersonTrackingAsyncResponseModel? Type348 { get; set; } + /// + /// + /// + public global::EdenAI.VideopersonTrackingAsyncModel? 
Type349 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideoquestionAnswerAsyncResponseModel? Type350 { get; set; } + /// + /// + /// + public global::EdenAI.VideoquestionAnswerAsyncModel? Type351 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideoshotChangeDetectionAsyncResponseModel? Type352 { get; set; } + /// + /// + /// + public global::EdenAI.VideoshotChangeDetectionAsyncModel? Type353 { get; set; } + /// + /// + /// + public global::EdenAI.AsyncvideotextDetectionAsyncResponseModel? Type354 { get; set; } + /// + /// + /// + public global::EdenAI.VideotextDetectionAsyncModel? Type355 { get; set; } + /// + /// + /// + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Type356 { get; set; } + /// + /// + /// + public global::EdenAI.AudiotextToSpeechResponseModel? Type357 { get; set; } + /// + /// + /// + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Type358 { get; set; } + /// + /// + /// + public global::EdenAI.AudiotextToSpeechTextToSpeechRequest? Type359 { get; set; } + /// + /// + /// + public global::EdenAI.AudiotextToSpeechAsyncTextToSpeechAsyncDataClass? Type360 { get; set; } + /// + /// + /// + public global::EdenAI.CreditsSerializer? Type361 { get; set; } + /// + /// + /// + public global::EdenAI.ImageaiDetectionAiDetectionDataClass? Type362 { get; set; } + /// + /// + /// + public global::EdenAI.ImageaiDetectionAiDetectionRequest? Type363 { get; set; } + /// + /// + /// + public global::EdenAI.ImageaiDetectionResponseModel? Type364 { get; set; } + /// + /// + /// + public global::EdenAI.ImageanonymizationAnonymizationDataClass? Type365 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type366 { get; set; } + /// + /// + /// + public global::EdenAI.ImageanonymizationResponseModel? Type367 { get; set; } + /// + /// + /// + public global::EdenAI.ImageanonymizationimagelandmarkDetectionimageexplicitContentimagedeepfakeDetectionImageRequest? 
Type368 { get; set; } + /// + /// + /// + public global::EdenAI.ImageautomlClassificationAutomlClassificationCreateProjectDataClass? Type369 { get; set; } + /// + /// + /// + public global::EdenAI.ImageautomlClassificationResponseModel? Type370 { get; set; } + /// + /// + /// + public global::EdenAI.ImageautomlClassificationcreateProjectAutomlClassificationCreateProjectRequest? Type371 { get; set; } + /// + /// + /// + public global::EdenAI.ImageautomlClassificationdeleteProjectAutomlClassificationDeleteRequest? Type372 { get; set; } + /// + /// + /// + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Type373 { get; set; } + /// + /// + /// + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalRequest? Type374 { get; set; } + /// + /// + /// + public global::EdenAI.ImagebackgroundRemovalResponseModel? Type375 { get; set; } + /// + /// + /// + public global::EdenAI.ImagedeepfakeDetectionDeepfakeDetectionDataClass? Type376 { get; set; } + /// + /// + /// + public global::EdenAI.ImagedeepfakeDetectionResponseModel? Type377 { get; set; } + /// + /// + /// + public global::EdenAI.ImageembeddingsEmbeddingsDataClass? Type378 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type379 { get; set; } + /// + /// + /// + public global::EdenAI.ImageembeddingsEmbeddingsRequest? Type380 { get; set; } + /// + /// + /// + public global::EdenAI.ImageembeddingsResponseModel? Type381 { get; set; } + /// + /// + /// + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Type382 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type383 { get; set; } + /// + /// + /// + public global::EdenAI.ImageexplicitContentResponseModel? Type384 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceCompareFaceCompareDataClass? Type385 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? 
Type386 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceCompareFaceCompareRequest? Type387 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceCompareResponseModel? Type388 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? Type389 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type390 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceDetectionFaceDetectionRequest? Type391 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceDetectionResponseModel? Type392 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? Type393 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceRecognitionResponseModel? Type394 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceRecognitionaddFaceFaceRecognitionAddFaceRequest? Type395 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceRecognitiondeleteFaceFaceRecognitionDeleteFaceRequest? Type396 { get; set; } + /// + /// + /// + public global::EdenAI.ImagefaceRecognitionrecognizeFaceRecognitionDetectFaceRequest? Type397 { get; set; } + /// + /// + /// + public global::EdenAI.ImagegenerationGenerationDataClass? Type398 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type399 { get; set; } + /// + /// + /// + public global::EdenAI.ImagegenerationGenerationRequest? Type400 { get; set; } + /// + /// + /// + public global::EdenAI.ImagegenerationResponseModel? Type401 { get; set; } + /// + /// + /// + public global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? Type402 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type403 { get; set; } + /// + /// + /// + public global::EdenAI.ImagelandmarkDetectionResponseModel? Type404 { get; set; } + /// + /// + /// + public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? 
Type405 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type406 { get; set; } + /// + /// + /// + public global::EdenAI.ImagelogoDetectionLogoDetectionRequest? Type407 { get; set; } + /// + /// + /// + public global::EdenAI.ImagelogoDetectionResponseModel? Type408 { get; set; } + /// + /// + /// + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Type409 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type410 { get; set; } + /// + /// + /// + public global::EdenAI.ImageobjectDetectionObjectDetectionRequest? Type411 { get; set; } + /// + /// + /// + public global::EdenAI.ImageobjectDetectionResponseModel? Type412 { get; set; } + /// + /// + /// + public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? Type413 { get; set; } + /// + /// + /// + public global::EdenAI.ImagequestionAnswerQuestionAnswerRequest? Type414 { get; set; } + /// + /// + /// + public global::EdenAI.ImagequestionAnswerResponseModel? Type415 { get; set; } + /// + /// + /// + public global::EdenAI.ImagesearchResponseModel? Type416 { get; set; } + /// + /// + /// + public global::EdenAI.ImagesearchSearchDeleteImageDataClass? Type417 { get; set; } + /// + /// + /// + public global::EdenAI.ImagesearchdeleteImageDeleteImageRequest? Type418 { get; set; } + /// + /// + /// + public global::EdenAI.ImagesearchlaunchSimilaritySearchImageRequest? Type419 { get; set; } + /// + /// + /// + public global::EdenAI.ImagesearchuploadImageUploadImageRequest? Type420 { get; set; } + /// + /// + /// + public global::EdenAI.LlmchatChatDataClass? Type421 { get; set; } + /// + /// + /// + public global::System.DateTimeOffset? Type422 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type423 { get; set; } + /// + /// + /// + public global::EdenAI.LlmchatResponseModel? Type424 { get; set; } + /// + /// + /// + public global::EdenAI.LlmchatllmchatChatRequest? 
Type425 { get; set; } + /// + /// + /// + public global::EdenAI.MultimodalchatChatDataClass? Type426 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type427 { get; set; } + /// + /// + /// + public global::EdenAI.MultimodalchatChatRequest? Type428 { get; set; } + /// + /// + /// + public global::EdenAI.MultimodalchatResponseModel? Type429 { get; set; } + /// + /// + /// + public global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? Type430 { get; set; } + /// + /// + /// + public global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? Type431 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type432 { get; set; } + /// + /// + /// + public global::EdenAI.OcrbankCheckParsingBankCheckParsingRequest? Type433 { get; set; } + /// + /// + /// + public global::EdenAI.OcrbankCheckParsingResponseModel? Type434 { get; set; } + /// + /// + /// + public global::EdenAI.OcrcustomDocumentParsingAsyncCustomDocumentParsingAsyncDataClass? Type435 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type436 { get; set; } + /// + /// + /// + public global::EdenAI.OcrdataExtractionDataExtractionDataClass? Type437 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type438 { get; set; } + /// + /// + /// + public global::EdenAI.OcrdataExtractionDataExtractionRequest? Type439 { get; set; } + /// + /// + /// + public global::EdenAI.OcrdataExtractionResponseModel? Type440 { get; set; } + /// + /// + /// + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Type441 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type442 { get; set; } + /// + /// + /// + public global::EdenAI.OcrfinancialParserFinancialParserRequest? Type443 { get; set; } + /// + /// + /// + public global::EdenAI.OcrfinancialParserResponseModel? 
Type444 { get; set; } + /// + /// + /// + public global::EdenAI.OcridentityParserIdentityParserDataClass? Type445 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type446 { get; set; } + /// + /// + /// + public global::EdenAI.OcridentityParserIdentityParserRequest? Type447 { get; set; } + /// + /// + /// + public global::EdenAI.OcridentityParserResponseModel? Type448 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrOcrDataClass? Type449 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type450 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrOcrRequest? Type451 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrResponseModel? Type452 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrAsyncOcrAsyncDataClass? Type453 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type454 { get; set; } + /// + /// + /// + public global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? Type455 { get; set; } + /// + /// + /// + public global::EdenAI.OcrresumeParserResponseModel? Type456 { get; set; } + /// + /// + /// + public global::EdenAI.OcrresumeParserResumeParserDataClass? Type457 { get; set; } + /// + /// + /// + public global::EdenAI.OcrresumeParserResumeParserRequest? Type458 { get; set; } + /// + /// + /// + public global::EdenAI.TextaiDetectionAiDetectionDataClass? Type459 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type460 { get; set; } + /// + /// + /// + public global::EdenAI.TextaiDetectionAiDetectionRequest? Type461 { get; set; } + /// + /// + /// + public global::EdenAI.TextaiDetectionResponseModel? Type462 { get; set; } + /// + /// + /// + public global::EdenAI.TextanonymizationAnonymizationDataClass? Type463 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? 
Type464 { get; set; } + /// + /// + /// + public global::EdenAI.TextanonymizationResponseModel? Type465 { get; set; } + /// + /// + /// + public global::EdenAI.TextchatChatDataClass? Type466 { get; set; } + /// + /// + /// + public global::EdenAI.TextchatChatRequest? Type467 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type468 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type469 { get; set; } + /// + /// + /// + public global::EdenAI.TextchatChatStreamRequest? Type470 { get; set; } + /// + /// + /// + public global::EdenAI.TextchatResponseModel? Type471 { get; set; } + /// + /// + /// + public global::EdenAI.TextcodeGenerationCodeGenerationDataClass? Type472 { get; set; } + /// + /// + /// + public global::EdenAI.TextcodeGenerationCodeGenerationRequest? Type473 { get; set; } + /// + /// + /// + public global::EdenAI.TextcodeGenerationResponseModel? Type474 { get; set; } + /// + /// + /// + public global::EdenAI.TextembeddingsEmbeddingsDataClass? Type475 { get; set; } + /// + /// + /// + public global::EdenAI.TextembeddingsEmbeddingsRequest? Type476 { get; set; } + /// + /// + /// + public global::EdenAI.TextembeddingsResponseModel? Type477 { get; set; } + /// + /// + /// + public global::EdenAI.TextemotionDetectionEmotionDetectionDataClass? Type478 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type479 { get; set; } + /// + /// + /// + public global::EdenAI.TextemotionDetectionEmotionDetectionRequest? Type480 { get; set; } + /// + /// + /// + public global::EdenAI.TextemotionDetectionResponseModel? Type481 { get; set; } + /// + /// + /// + public global::EdenAI.TextentitySentimentEntitySentimentDataClass? Type482 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type483 { get; set; } + /// + /// + /// + public global::EdenAI.TextentitySentimentEntitySentimentRequest? 
Type484 { get; set; } + /// + /// + /// + public global::EdenAI.TextentitySentimentResponseModel? Type485 { get; set; } + /// + /// + /// + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Type486 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type487 { get; set; } + /// + /// + /// + public global::EdenAI.TextkeywordExtractionResponseModel? Type488 { get; set; } + /// + /// + /// + public global::EdenAI.TextmoderationModerationDataClass? Type489 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type490 { get; set; } + /// + /// + /// + public global::EdenAI.TextmoderationResponseModel? Type491 { get; set; } + /// + /// + /// + public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? Type492 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type493 { get; set; } + /// + /// + /// + public global::EdenAI.TextnamedEntityRecognitionResponseModel? Type494 { get; set; } + /// + /// + /// + public global::EdenAI.TextplagiaDetectionPlagiaDetectionDataClass? Type495 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type496 { get; set; } + /// + /// + /// + public global::EdenAI.TextplagiaDetectionPlagiaDetectionRequest? Type497 { get; set; } + /// + /// + /// + public global::EdenAI.TextplagiaDetectionResponseModel? Type498 { get; set; } + /// + /// + /// + public global::EdenAI.TextpromptOptimizationPromptOptimizationDataClass? Type499 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type500 { get; set; } + /// + /// + /// + public global::EdenAI.TextpromptOptimizationPromptOptimizationRequest? Type501 { get; set; } + /// + /// + /// + public global::EdenAI.TextpromptOptimizationResponseModel? Type502 { get; set; } + /// + /// + /// + public global::EdenAI.TextsentimentAnalysisResponseModel? 
Type503 { get; set; } + /// + /// + /// + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Type504 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type505 { get; set; } + /// + /// + /// + public global::EdenAI.TextspellCheckResponseModel? Type506 { get; set; } + /// + /// + /// + public global::EdenAI.TextspellCheckSpellCheckDataClass? Type507 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type508 { get; set; } + /// + /// + /// + public global::EdenAI.TextspellCheckSpellCheckRequest? Type509 { get; set; } + /// + /// + /// + public global::EdenAI.TextsummarizeResponseModel? Type510 { get; set; } + /// + /// + /// + public global::EdenAI.TextsummarizeSummarizeDataClass? Type511 { get; set; } + /// + /// + /// + public global::EdenAI.TextsummarizeSummarizeRequest? Type512 { get; set; } + /// + /// + /// + public global::EdenAI.TexttopicExtractionResponseModel? Type513 { get; set; } + /// + /// + /// + public global::EdenAI.TexttopicExtractionTopicExtractionDataClass? Type514 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type515 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Type516 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationRequest? Type517 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationautomaticTranslationResponseModel? Type518 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationdocumentTranslationDocumentTranslationDataClass? Type519 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationdocumentTranslationDocumentTranslationRequest? Type520 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationdocumentTranslationResponseModel? 
Type521 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Type522 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type523 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationlanguageDetectionLanguageDetectionRequest? Type524 { get; set; } + /// + /// + /// + public global::EdenAI.TranslationlanguageDetectionResponseModel? Type525 { get; set; } + /// + /// + /// + public global::EdenAI.VideodeepfakeDetectionAsyncDeepfakeDetectionAsyncDataClass? Type526 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type527 { get; set; } + /// + /// + /// + public global::EdenAI.VideoexplicitContentDetectionAsyncExplicitContentDetectionAsyncDataClass? Type528 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type529 { get; set; } + /// + /// + /// + public global::EdenAI.VideofaceDetectionAsyncFaceDetectionAsyncDataClass? Type530 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type531 { get; set; } + /// + /// + /// + public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Type532 { get; set; } + /// + /// + /// + public global::EdenAI.VideolabelDetectionAsyncLabelDetectionAsyncDataClass? Type533 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type534 { get; set; } + /// + /// + /// + public global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? Type535 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type536 { get; set; } + /// + /// + /// + public global::EdenAI.VideoobjectTrackingAsyncObjectTrackingAsyncDataClass? Type537 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type538 { get; set; } + /// + /// + /// + public global::EdenAI.VideopersonTrackingAsyncPersonTrackingAsyncDataClass? 
Type539 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type540 { get; set; } + /// + /// + /// + public global::EdenAI.VideoquestionAnswerQuestionAnswerDataClass? Type541 { get; set; } + /// + /// + /// + public global::EdenAI.VideoquestionAnswerQuestionAnswerRequest? Type542 { get; set; } + /// + /// + /// + public global::EdenAI.VideoquestionAnswerResponseModel? Type543 { get; set; } + /// + /// + /// + public global::EdenAI.VideoquestionAnswerAsyncQuestionAnswerAsyncDataClass? Type544 { get; set; } + /// + /// + /// + public global::EdenAI.VideoshotChangeDetectionAsyncShotChangeDetectionAsyncDataClass? Type545 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type546 { get; set; } + /// + /// + /// + public global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? Type547 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type548 { get; set; } + /// + /// + /// + public global::EdenAI.FeatureBatchRetrieveStatus? Type549 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type550 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type551 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type552 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type553 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type554 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type555 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type556 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.IList? Type557 { get; set; } + + /// + /// + /// + public global::System.Collections.Generic.List? ListType0 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? 
ListType1 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType2 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType3 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType4 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType5 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType6 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType7 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType8 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType9 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType10 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType11 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType12 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType13 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType14 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType15 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType16 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType17 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType18 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType19 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType20 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType21 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? 
ListType22 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType23 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType24 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType25 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType26 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType27 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType28 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType29 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType30 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType31 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType32 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType33 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType34 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType35 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType36 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType37 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType38 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType39 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType40 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType41 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType42 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? 
ListType43 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType44 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType45 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType46 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType47 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType48 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType49 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType50 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType51 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType52 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType53 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType54 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType55 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType56 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType57 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType58 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType59 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType60 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType61 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType62 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType63 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? 
ListType64 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType65 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType66 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType67 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType68 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType69 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType70 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType71 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType72 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType73 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType74 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType75 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType76 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType77 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType78 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType79 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType80 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType81 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType82 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType83 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType84 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? 
ListType85 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType86 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType87 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType88 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType89 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType90 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType91 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType92 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType93 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType94 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? ListType95 { get; set; } + /// + /// + /// + public global::System.Collections.Generic.List? 
ListType96 { get; set; } + } +} \ No newline at end of file diff --git a/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.TextTextKeywordExtractionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.TextTextKeywordExtractionCreate.g.cs index 9c4ff85..b921c1e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.TextTextKeywordExtractionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.TextTextKeywordExtractionCreate.g.cs @@ -238,7 +238,7 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -275,13 +275,13 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -313,13 +313,13 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 
= global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -351,13 +351,13 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -389,13 +389,13 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -439,7 +439,7 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.TextkeywordExtractionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextkeywordExtractionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -469,7 +469,7 @@ partial void ProcessTextTextKeywordExtractionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextkeywordExtractionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextkeywordExtractionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.g.cs index 7a1cce3..8091d16 100644 --- a/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.KeywordExtractionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class KeywordExtractionClient : global::EdenAI.IKeywordExt /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncCreate.g.cs index 1cf60d2..609f2fe 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncCreate.g.cs +++ 
b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncCreate.g.cs @@ -190,7 +190,7 @@ partial void ProcessVideoVideoLabelDetectionAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -220,7 +220,7 @@ partial void ProcessVideoVideoLabelDetectionAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve.g.cs index f101fd8..c0a3fd8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve2.g.cs index 2d4f2dd..aa71008 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.VideoVideoLabelDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = 
await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideolabelDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideolabelDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoLabelDetectionAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideolabelDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideolabelDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.g.cs index 1d0fa3e..a3111d1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LabelDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class LabelDetectionAsyncClient : global::EdenAI.ILabelDet /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.ImageImageLandmarkDetectionCreate.g.cs 
b/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.ImageImageLandmarkDetectionCreate.g.cs index c8f2bdc..4d56d6c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.ImageImageLandmarkDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.ImageImageLandmarkDetectionCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagelandmarkDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.ImagelandmarkDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageLandmarkDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagelandmarkDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagelandmarkDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.g.cs index 30880bb..fc21b4e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LandmarkDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class LandmarkDetectionClient : global::EdenAI.ILandmarkDe /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.TranslationTranslationLanguageDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.TranslationTranslationLanguageDetectionCreate.g.cs index 7efdcd8..de9e2c5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.TranslationTranslationLanguageDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.TranslationTranslationLanguageDetectionCreate.g.cs @@ -86,7 
+86,7 @@ partial void ProcessTranslationTranslationLanguageDetectionCreateResponseContent __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -123,13 +123,13 @@ partial void ProcessTranslationTranslationLanguageDetectionCreateResponseContent if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -161,13 +161,13 @@ partial void ProcessTranslationTranslationLanguageDetectionCreateResponseContent if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -199,13 +199,13 @@ partial void 
ProcessTranslationTranslationLanguageDetectionCreateResponseContent if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -237,13 +237,13 @@ partial void ProcessTranslationTranslationLanguageDetectionCreateResponseContent if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -287,7 +287,7 @@ partial void ProcessTranslationTranslationLanguageDetectionCreateResponseContent __response.EnsureSuccessStatusCode(); return - global::EdenAI.TranslationlanguageDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TranslationlanguageDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -317,7 +317,7 @@ partial void ProcessTranslationTranslationLanguageDetectionCreateResponseContent ).ConfigureAwait(false); return - await global::EdenAI.TranslationlanguageDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TranslationlanguageDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.g.cs index f45959a..b039519 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LanguageDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class LanguageDetectionClient : global::EdenAI.ILanguageDe /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncCreate.g.cs index 08377c4..a340451 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncCreate.g.cs @@ -190,7 +190,7 @@ partial void ProcessVideoVideoLogoDetectionAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); 
return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -220,7 +220,7 @@ partial void ProcessVideoVideoLogoDetectionAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve.g.cs index 17285cd..21c292b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve2.g.cs index e40fb94..b17294c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.VideoVideoLogoDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideologoDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.AsyncvideologoDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoLogoDetectionAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideologoDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideologoDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.g.cs index 0e8392a..614f800 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class LogoDetectionAsyncClient : global::EdenAI.ILogoDetec /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.ImageImageLogoDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.ImageImageLogoDetectionCreate.g.cs index f66ebe5..13b60ff 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.ImageImageLogoDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.ImageImageLogoDetectionCreate.g.cs @@ -87,7 +87,7 @@ partial void 
ProcessImageImageLogoDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -124,13 +124,13 @@ partial void ProcessImageImageLogoDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -162,13 +162,13 @@ partial void ProcessImageImageLogoDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -200,13 +200,13 @@ partial void ProcessImageImageLogoDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -238,13 +238,13 @@ partial void ProcessImageImageLogoDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -288,7 +288,7 @@ partial void ProcessImageImageLogoDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagelogoDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagelogoDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -318,7 +318,7 @@ partial void ProcessImageImageLogoDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagelogoDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagelogoDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.g.cs index a498c15..99cbfd4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.LogoDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class LogoDetectionClient : global::EdenAI.ILogoDetectionC /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.AudiospeechToTextAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.AudiospeechToTextAsyncModel.g.cs index 0262565..f49238f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.AudiospeechToTextAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.AudiospeechToTextAsyncModel.g.cs @@ -11,14 +11,8 @@ public sealed partial class AudiospeechToTextAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("speechmatics")] - public 
global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Speechmatics { get; set; } - - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("assembly")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Assembly { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("deepgram")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Deepgram { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -29,56 +23,56 @@ public sealed partial class AudiospeechToTextAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("voci")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Voci { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("speechmatics")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Speechmatics { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("faker")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Faker { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("deepgram")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Deepgram { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("gladia")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? 
Gladia { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("voxist")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Voxist { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("faker")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Faker { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("gladia")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Gladia { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Oneai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("assembly")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Assembly { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? 
Oneai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("voci")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Voci { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("voxist")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Voxist { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -86,6 +80,12 @@ public sealed partial class AudiospeechToTextAsyncModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -95,76 +95,76 @@ public sealed partial class AudiospeechToTextAsyncModel /// /// Initializes a new instance of the class. 
/// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public AudiospeechToTextAsyncModel( - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? speechmatics, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? assembly, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? symbl, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? voci, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? 
amazon, global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? deepgram, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? voxist, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? symbl, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? speechmatics, global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? faker, global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? gladia, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? google, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? oneai, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? amazon, global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? openai, - global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? microsoft) + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? oneai, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? assembly, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? voci, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? voxist, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? microsoft, + global::EdenAI.AudiospeechToTextAsyncSpeechToTextAsyncDataClass? 
google) { - this.Speechmatics = speechmatics; - this.Assembly = assembly; - this.Symbl = symbl; - this.Voci = voci; - this.Amazon = amazon; this.Deepgram = deepgram; - this.Voxist = voxist; + this.Symbl = symbl; + this.Speechmatics = speechmatics; this.Faker = faker; this.Gladia = gladia; - this.Google = google; - this.Oneai = oneai; + this.Amazon = amazon; this.Openai = openai; + this.Oneai = oneai; + this.Assembly = assembly; + this.Voci = voci; + this.Voxist = voxist; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.AudiotextToSpeechResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.AudiotextToSpeechResponseModel.g.cs index 24644c9..98cab8d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.AudiotextToSpeechResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.AudiotextToSpeechResponseModel.g.cs @@ -11,26 +11,26 @@ public sealed partial class AudiotextToSpeechResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("lovoai")] + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Lovoai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("elevenlabs")] - public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Elevenlabs { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("deepgram")] + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? 
Deepgram { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("deepgram")] - public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Deepgram { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("elevenlabs")] + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Elevenlabs { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -41,14 +41,14 @@ public sealed partial class AudiotextToSpeechResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("lovoai")] - public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Lovoai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -59,46 +59,46 @@ public sealed partial class AudiotextToSpeechResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public AudiotextToSpeechResponseModel( - global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? amazon, - global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? elevenlabs, + global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? lovoai, global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? deepgram, - global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? google, + global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? elevenlabs, + global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? amazon, global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? openai, - global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? lovoai, - global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? microsoft) + global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? microsoft, + global::EdenAI.AudiotextToSpeechTextToSpeechDataClass? 
google) { - this.Amazon = amazon; - this.Elevenlabs = elevenlabs; + this.Lovoai = lovoai; this.Deepgram = deepgram; - this.Google = google; + this.Elevenlabs = elevenlabs; + this.Amazon = amazon; this.Openai = openai; - this.Lovoai = lovoai; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagebackgroundRemovalResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagebackgroundRemovalResponseModel.g.cs index 3ea2a07..c89fc78 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagebackgroundRemovalResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagebackgroundRemovalResponseModel.g.cs @@ -11,38 +11,38 @@ public sealed partial class ImagebackgroundRemovalResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("clipdrop")] - public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Clipdrop { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("photoroom")] + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Photoroom { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] - public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Api4ai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Sentisight { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("picsart")] - public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? 
Picsart { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("stabilityai")] + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Stabilityai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("photoroom")] - public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Photoroom { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Api4ai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("stabilityai")] - public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Stabilityai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("clipdrop")] + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Clipdrop { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] - public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Sentisight { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("picsart")] + public global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? Picsart { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -59,22 +59,22 @@ public sealed partial class ImagebackgroundRemovalResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -84,20 +84,20 @@ public sealed partial class ImagebackgroundRemovalResponseModel [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImagebackgroundRemovalResponseModel( - global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? clipdrop, - global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? api4ai, - global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? picsart, global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? photoroom, - global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? stabilityai, global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? sentisight, + global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? stabilityai, + global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? api4ai, + global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? clipdrop, + global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? picsart, global::EdenAI.ImagebackgroundRemovalBackgroundRemovalDataClass? 
microsoft) { - this.Clipdrop = clipdrop; - this.Api4ai = api4ai; - this.Picsart = picsart; this.Photoroom = photoroom; - this.Stabilityai = stabilityai; this.Sentisight = sentisight; + this.Stabilityai = stabilityai; + this.Api4ai = api4ai; + this.Clipdrop = clipdrop; + this.Picsart = picsart; this.Microsoft = microsoft; } diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImageembeddingsResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImageembeddingsResponseModel.g.cs index fa0114b..cc66b71 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImageembeddingsResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImageembeddingsResponseModel.g.cs @@ -11,20 +11,20 @@ public sealed partial class ImageembeddingsResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.ImageembeddingsEmbeddingsDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("alephalpha")] + public global::EdenAI.ImageembeddingsEmbeddingsDataClass? Alephalpha { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImageembeddingsEmbeddingsDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.ImageembeddingsEmbeddingsDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("alephalpha")] - public global::EdenAI.ImageembeddingsEmbeddingsDataClass? Alephalpha { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImageembeddingsEmbeddingsDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -35,26 +35,26 @@ public sealed partial class ImageembeddingsResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImageembeddingsResponseModel( + global::EdenAI.ImageembeddingsEmbeddingsDataClass? alephalpha, global::EdenAI.ImageembeddingsEmbeddingsDataClass? amazon, - global::EdenAI.ImageembeddingsEmbeddingsDataClass? google, - global::EdenAI.ImageembeddingsEmbeddingsDataClass? alephalpha) + global::EdenAI.ImageembeddingsEmbeddingsDataClass? google) { + this.Alephalpha = alephalpha; this.Amazon = amazon; this.Google = google; - this.Alephalpha = alephalpha; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImageexplicitContentResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImageexplicitContentResponseModel.g.cs index 99e385b..0af1701 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImageexplicitContentResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImageexplicitContentResponseModel.g.cs @@ -11,44 +11,44 @@ public sealed partial class ImageexplicitContentResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Api4ai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? 
Sentisight { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Api4ai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Clarifai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Sentisight { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? 
Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Clarifai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImageexplicitContentExplicitContentDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -59,46 +59,46 @@ public sealed partial class ImageexplicitContentResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImageexplicitContentResponseModel( + global::EdenAI.ImageexplicitContentExplicitContentDataClass? sentisight, global::EdenAI.ImageexplicitContentExplicitContentDataClass? api4ai, global::EdenAI.ImageexplicitContentExplicitContentDataClass? amazon, - global::EdenAI.ImageexplicitContentExplicitContentDataClass? 
clarifai, - global::EdenAI.ImageexplicitContentExplicitContentDataClass? google, - global::EdenAI.ImageexplicitContentExplicitContentDataClass? sentisight, global::EdenAI.ImageexplicitContentExplicitContentDataClass? openai, - global::EdenAI.ImageexplicitContentExplicitContentDataClass? microsoft) + global::EdenAI.ImageexplicitContentExplicitContentDataClass? microsoft, + global::EdenAI.ImageexplicitContentExplicitContentDataClass? clarifai, + global::EdenAI.ImageexplicitContentExplicitContentDataClass? google) { + this.Sentisight = sentisight; this.Api4ai = api4ai; this.Amazon = amazon; - this.Clarifai = clarifai; - this.Google = google; - this.Sentisight = sentisight; this.Openai = openai; this.Microsoft = microsoft; + this.Clarifai = clarifai; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceCompareResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceCompareResponseModel.g.cs index b3d9dcc..80809b7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceCompareResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceCompareResponseModel.g.cs @@ -11,20 +11,20 @@ public sealed partial class ImagefaceCompareResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.ImagefaceCompareFaceCompareDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.ImagefaceCompareFaceCompareDataClass? Base64 { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("facepp")] - public global::EdenAI.ImagefaceCompareFaceCompareDataClass? 
Facepp { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.ImagefaceCompareFaceCompareDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.ImagefaceCompareFaceCompareDataClass? Base64 { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("facepp")] + public global::EdenAI.ImagefaceCompareFaceCompareDataClass? Facepp { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -35,26 +35,26 @@ public sealed partial class ImagefaceCompareResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImagefaceCompareResponseModel( + global::EdenAI.ImagefaceCompareFaceCompareDataClass? base64, global::EdenAI.ImagefaceCompareFaceCompareDataClass? amazon, - global::EdenAI.ImagefaceCompareFaceCompareDataClass? facepp, - global::EdenAI.ImagefaceCompareFaceCompareDataClass? base64) + global::EdenAI.ImagefaceCompareFaceCompareDataClass? 
facepp) { + this.Base64 = base64; this.Amazon = amazon; this.Facepp = facepp; - this.Base64 = base64; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceDetectionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceDetectionResponseModel.g.cs index 2d47b1b..329164c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceDetectionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceDetectionResponseModel.g.cs @@ -23,20 +23,20 @@ public sealed partial class ImagefaceDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] - public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? Clarifai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] + public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? Clarifai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -53,13 +53,13 @@ public sealed partial class ImagefaceDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -68,15 +68,15 @@ public sealed partial class ImagefaceDetectionResponseModel public ImagefaceDetectionResponseModel( global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? api4ai, global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? amazon, + global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? microsoft, global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? clarifai, - global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? google, - global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? microsoft) + global::EdenAI.ImagefaceDetectionFaceDetectionDataClass? 
google) { this.Api4ai = api4ai; this.Amazon = amazon; + this.Microsoft = microsoft; this.Clarifai = clarifai; this.Google = google; - this.Microsoft = microsoft; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceRecognitionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceRecognitionResponseModel.g.cs index a34c20e..2f1ae54 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceRecognitionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagefaceRecognitionResponseModel.g.cs @@ -17,14 +17,14 @@ public sealed partial class ImagefaceRecognitionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("facepp")] - public global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? Facepp { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("facepp")] + public global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? 
Facepp { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -38,10 +38,10 @@ public sealed partial class ImagefaceRecognitionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -49,12 +49,12 @@ public sealed partial class ImagefaceRecognitionResponseModel #endif public ImagefaceRecognitionResponseModel( global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? amazon, - global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? facepp, - global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? microsoft) + global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? microsoft, + global::EdenAI.ImagefaceRecognitionFaceRecognitionAddFaceDataClass? facepp) { this.Amazon = amazon; - this.Facepp = facepp; this.Microsoft = microsoft; + this.Facepp = facepp; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagegenerationResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagegenerationResponseModel.g.cs index b75f9b1..836b946 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagegenerationResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagegenerationResponseModel.g.cs @@ -17,26 +17,26 @@ public sealed partial class ImagegenerationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.ImagegenerationGenerationDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("leonardo")] + public global::EdenAI.ImagegenerationGenerationDataClass? 
Leonardo { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("minimax")] - public global::EdenAI.ImagegenerationGenerationDataClass? Minimax { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("stabilityai")] + public global::EdenAI.ImagegenerationGenerationDataClass? Stabilityai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("stabilityai")] - public global::EdenAI.ImagegenerationGenerationDataClass? Stabilityai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.ImagegenerationGenerationDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("leonardo")] - public global::EdenAI.ImagegenerationGenerationDataClass? Leonardo { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.ImagegenerationGenerationDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -47,14 +47,14 @@ public sealed partial class ImagegenerationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("replicate")] - public global::EdenAI.ImagegenerationGenerationDataClass? Replicate { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("minimax")] + public global::EdenAI.ImagegenerationGenerationDataClass? 
Minimax { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.ImagegenerationGenerationDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("replicate")] + public global::EdenAI.ImagegenerationGenerationDataClass? Replicate { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -68,25 +68,25 @@ public sealed partial class ImagegenerationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -94,22 +94,22 @@ public sealed partial class ImagegenerationResponseModel #endif public ImagegenerationResponseModel( global::EdenAI.ImagegenerationGenerationDataClass? deepai, - global::EdenAI.ImagegenerationGenerationDataClass? amazon, - global::EdenAI.ImagegenerationGenerationDataClass? minimax, - global::EdenAI.ImagegenerationGenerationDataClass? stabilityai, global::EdenAI.ImagegenerationGenerationDataClass? leonardo, + global::EdenAI.ImagegenerationGenerationDataClass? stabilityai, + global::EdenAI.ImagegenerationGenerationDataClass? 
amazon, + global::EdenAI.ImagegenerationGenerationDataClass? openai, global::EdenAI.ImagegenerationGenerationDataClass? bytedance, - global::EdenAI.ImagegenerationGenerationDataClass? replicate, - global::EdenAI.ImagegenerationGenerationDataClass? openai) + global::EdenAI.ImagegenerationGenerationDataClass? minimax, + global::EdenAI.ImagegenerationGenerationDataClass? replicate) { this.Deepai = deepai; - this.Amazon = amazon; - this.Minimax = minimax; - this.Stabilityai = stabilityai; this.Leonardo = leonardo; + this.Stabilityai = stabilityai; + this.Amazon = amazon; + this.Openai = openai; this.Bytedance = bytedance; + this.Minimax = minimax; this.Replicate = replicate; - this.Openai = openai; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagelandmarkDetectionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagelandmarkDetectionResponseModel.g.cs index a7930e7..45e2318 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagelandmarkDetectionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagelandmarkDetectionResponseModel.g.cs @@ -11,14 +11,14 @@ public sealed partial class ImagelandmarkDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? 
Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -29,21 +29,21 @@ public sealed partial class ImagelandmarkDetectionResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImagelandmarkDetectionResponseModel( - global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? google, - global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? microsoft) + global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? microsoft, + global::EdenAI.ImagelandmarkDetectionLandmarkDetectionDataClass? google) { - this.Google = google; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagelogoDetectionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagelogoDetectionResponseModel.g.cs index 23f2b00..655c77d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagelogoDetectionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagelogoDetectionResponseModel.g.cs @@ -8,24 +8,12 @@ namespace EdenAI /// public sealed partial class ImagelogoDetectionResponseModel { - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] - public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? 
Api4ai { get; set; } - /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// [global::System.Text.Json.Serialization.JsonPropertyName("smartclick")] public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? Smartclick { get; set; } - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] - public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? Clarifai { get; set; } - /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -35,8 +23,8 @@ public sealed partial class ImagelogoDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] + public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? Api4ai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -50,6 +38,18 @@ public sealed partial class ImagelogoDetectionResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] + public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? Clarifai { get; set; } + + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? 
Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -59,19 +59,13 @@ public sealed partial class ImagelogoDetectionResponseModel /// /// Initializes a new instance of the class. /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -80,25 +74,31 @@ public sealed partial class ImagelogoDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImagelogoDetectionResponseModel( - global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? api4ai, global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? smartclick, - global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? clarifai, global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? anthropic, - global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? google, + global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? api4ai, global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? openai, - global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? microsoft) + global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? microsoft, + global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? clarifai, + global::EdenAI.ImagelogoDetectionLogoDetectionDataClass? 
google) { - this.Api4ai = api4ai; this.Smartclick = smartclick; - this.Clarifai = clarifai; this.Anthropic = anthropic; - this.Google = google; + this.Api4ai = api4ai; this.Openai = openai; this.Microsoft = microsoft; + this.Clarifai = clarifai; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImageobjectDetectionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImageobjectDetectionResponseModel.g.cs index 14ff125..7a44c67 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImageobjectDetectionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImageobjectDetectionResponseModel.g.cs @@ -11,38 +11,38 @@ public sealed partial class ImageobjectDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] - public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Api4ai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Sentisight { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Api4ai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] - public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Clarifai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? 
Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] - public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Sentisight { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Clarifai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -53,41 +53,41 @@ public sealed partial class ImageobjectDetectionResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImageobjectDetectionResponseModel( + global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? sentisight, global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? api4ai, global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? amazon, + global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? microsoft, global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? clarifai, - global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? google, - global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? sentisight, - global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? microsoft) + global::EdenAI.ImageobjectDetectionObjectDetectionDataClass? 
google) { + this.Sentisight = sentisight; this.Api4ai = api4ai; this.Amazon = amazon; + this.Microsoft = microsoft; this.Clarifai = clarifai; this.Google = google; - this.Sentisight = sentisight; - this.Microsoft = microsoft; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagequestionAnswerResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagequestionAnswerResponseModel.g.cs index 195f7e5..17ec197 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagequestionAnswerResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagequestionAnswerResponseModel.g.cs @@ -11,20 +11,20 @@ public sealed partial class ImagequestionAnswerResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("alephalpha")] + public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? Alephalpha { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("alephalpha")] - public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? Alephalpha { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -35,26 +35,26 @@ public sealed partial class ImagequestionAnswerResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImagequestionAnswerResponseModel( + global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? alephalpha, global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? openai, - global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? google, - global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? alephalpha) + global::EdenAI.ImagequestionAnswerQuestionAnswerDataClass? google) { + this.Alephalpha = alephalpha; this.Openai = openai; this.Google = google; - this.Alephalpha = alephalpha; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.ImagesearchResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.ImagesearchResponseModel.g.cs index 344c8a8..8ea2f5c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.ImagesearchResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.ImagesearchResponseModel.g.cs @@ -11,14 +11,14 @@ public sealed partial class ImagesearchResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] - public global::EdenAI.ImagesearchSearchDeleteImageDataClass? Sentisight { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("nyckel")] + public global::EdenAI.ImagesearchSearchDeleteImageDataClass? 
Nyckel { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("nyckel")] - public global::EdenAI.ImagesearchSearchDeleteImageDataClass? Nyckel { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] + public global::EdenAI.ImagesearchSearchDeleteImageDataClass? Sentisight { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -29,21 +29,21 @@ public sealed partial class ImagesearchResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public ImagesearchResponseModel( - global::EdenAI.ImagesearchSearchDeleteImageDataClass? sentisight, - global::EdenAI.ImagesearchSearchDeleteImageDataClass? nyckel) + global::EdenAI.ImagesearchSearchDeleteImageDataClass? nyckel, + global::EdenAI.ImagesearchSearchDeleteImageDataClass? sentisight) { - this.Sentisight = sentisight; this.Nyckel = nyckel; + this.Sentisight = sentisight; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.LlmchatResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.LlmchatResponseModel.g.cs index 2693cd4..b781bc0 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.LlmchatResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.LlmchatResponseModel.g.cs @@ -11,164 +11,164 @@ public sealed partial class LlmchatResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("dashscope")] - public global::EdenAI.LlmchatChatDataClass? 
Dashscope { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("deepinfra")] + public global::EdenAI.LlmchatChatDataClass? Deepinfra { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] - public global::EdenAI.LlmchatChatDataClass? Cohere { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.LlmchatChatDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("together_ai")] - public global::EdenAI.LlmchatChatDataClass? TogetherAi { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("huggingface")] + public global::EdenAI.LlmchatChatDataClass? Huggingface { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("deepinfra")] - public global::EdenAI.LlmchatChatDataClass? Deepinfra { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("together_ai")] + public global::EdenAI.LlmchatChatDataClass? TogetherAi { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("iointelligence")] - public global::EdenAI.LlmchatChatDataClass? Iointelligence { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.LlmchatChatDataClass? Google { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("cloudflare")] - public global::EdenAI.LlmchatChatDataClass? 
Cloudflare { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("databricks")] + public global::EdenAI.LlmchatChatDataClass? Databricks { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.LlmchatChatDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("deepseek")] + public global::EdenAI.LlmchatChatDataClass? Deepseek { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("perplexityai")] - public global::EdenAI.LlmchatChatDataClass? Perplexityai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("groq")] + public global::EdenAI.LlmchatChatDataClass? Groq { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("replicate")] - public global::EdenAI.LlmchatChatDataClass? Replicate { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.LlmchatChatDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("meta")] - public global::EdenAI.LlmchatChatDataClass? Meta { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("ovhcloud")] + public global::EdenAI.LlmchatChatDataClass? Ovhcloud { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] - public global::EdenAI.LlmchatChatDataClass? 
Mistral { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] + public global::EdenAI.LlmchatChatDataClass? Tenstorrent { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.LlmchatChatDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("dashscope")] + public global::EdenAI.LlmchatChatDataClass? Dashscope { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("groq")] - public global::EdenAI.LlmchatChatDataClass? Groq { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("cloudflare")] + public global::EdenAI.LlmchatChatDataClass? Cloudflare { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("bytedance")] - public global::EdenAI.LlmchatChatDataClass? Bytedance { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("nebius")] + public global::EdenAI.LlmchatChatDataClass? Nebius { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("deepseek")] - public global::EdenAI.LlmchatChatDataClass? Deepseek { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("iointelligence")] + public global::EdenAI.LlmchatChatDataClass? Iointelligence { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("fireworks_ai")] - public global::EdenAI.LlmchatChatDataClass? 
FireworksAi { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.LlmchatChatDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.LlmchatChatDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("cerebras")] + public global::EdenAI.LlmchatChatDataClass? Cerebras { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("databricks")] - public global::EdenAI.LlmchatChatDataClass? Databricks { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("bytedance")] + public global::EdenAI.LlmchatChatDataClass? Bytedance { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("nebius")] - public global::EdenAI.LlmchatChatDataClass? Nebius { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] + public global::EdenAI.LlmchatChatDataClass? Cohere { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.LlmchatChatDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("replicate")] + public global::EdenAI.LlmchatChatDataClass? Replicate { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.LlmchatChatDataClass? 
Xai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("fireworks_ai")] + public global::EdenAI.LlmchatChatDataClass? FireworksAi { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("ovhcloud")] - public global::EdenAI.LlmchatChatDataClass? Ovhcloud { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.LlmchatChatDataClass? Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("huggingface")] - public global::EdenAI.LlmchatChatDataClass? Huggingface { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("anthropic")] + public global::EdenAI.LlmchatChatDataClass? Anthropic { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("minimax")] - public global::EdenAI.LlmchatChatDataClass? Minimax { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] + public global::EdenAI.LlmchatChatDataClass? Mistral { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] - public global::EdenAI.LlmchatChatDataClass? Tenstorrent { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("perplexityai")] + public global::EdenAI.LlmchatChatDataClass? Perplexityai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("anthropic")] - public global::EdenAI.LlmchatChatDataClass? 
Anthropic { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("minimax")] + public global::EdenAI.LlmchatChatDataClass? Minimax { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("cerebras")] - public global::EdenAI.LlmchatChatDataClass? Cerebras { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("meta")] + public global::EdenAI.LlmchatChatDataClass? Meta { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -179,146 +179,146 @@ public sealed partial class LlmchatResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - 
/// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public LlmchatResponseModel( - global::EdenAI.LlmchatChatDataClass? dashscope, - global::EdenAI.LlmchatChatDataClass? cohere, - global::EdenAI.LlmchatChatDataClass? togetherAi, global::EdenAI.LlmchatChatDataClass? deepinfra, - global::EdenAI.LlmchatChatDataClass? iointelligence, - global::EdenAI.LlmchatChatDataClass? 
cloudflare, + global::EdenAI.LlmchatChatDataClass? microsoft, + global::EdenAI.LlmchatChatDataClass? huggingface, + global::EdenAI.LlmchatChatDataClass? togetherAi, global::EdenAI.LlmchatChatDataClass? google, - global::EdenAI.LlmchatChatDataClass? perplexityai, - global::EdenAI.LlmchatChatDataClass? replicate, - global::EdenAI.LlmchatChatDataClass? meta, - global::EdenAI.LlmchatChatDataClass? mistral, - global::EdenAI.LlmchatChatDataClass? amazon, + global::EdenAI.LlmchatChatDataClass? databricks, + global::EdenAI.LlmchatChatDataClass? deepseek, global::EdenAI.LlmchatChatDataClass? groq, + global::EdenAI.LlmchatChatDataClass? amazon, + global::EdenAI.LlmchatChatDataClass? ovhcloud, + global::EdenAI.LlmchatChatDataClass? tenstorrent, + global::EdenAI.LlmchatChatDataClass? dashscope, + global::EdenAI.LlmchatChatDataClass? cloudflare, + global::EdenAI.LlmchatChatDataClass? nebius, + global::EdenAI.LlmchatChatDataClass? iointelligence, + global::EdenAI.LlmchatChatDataClass? openai, + global::EdenAI.LlmchatChatDataClass? cerebras, global::EdenAI.LlmchatChatDataClass? bytedance, - global::EdenAI.LlmchatChatDataClass? deepseek, + global::EdenAI.LlmchatChatDataClass? cohere, + global::EdenAI.LlmchatChatDataClass? replicate, global::EdenAI.LlmchatChatDataClass? fireworksAi, - global::EdenAI.LlmchatChatDataClass? openai, - global::EdenAI.LlmchatChatDataClass? databricks, - global::EdenAI.LlmchatChatDataClass? nebius, - global::EdenAI.LlmchatChatDataClass? microsoft, global::EdenAI.LlmchatChatDataClass? xai, - global::EdenAI.LlmchatChatDataClass? ovhcloud, - global::EdenAI.LlmchatChatDataClass? huggingface, - global::EdenAI.LlmchatChatDataClass? minimax, - global::EdenAI.LlmchatChatDataClass? tenstorrent, global::EdenAI.LlmchatChatDataClass? anthropic, - global::EdenAI.LlmchatChatDataClass? cerebras) + global::EdenAI.LlmchatChatDataClass? mistral, + global::EdenAI.LlmchatChatDataClass? perplexityai, + global::EdenAI.LlmchatChatDataClass? 
minimax, + global::EdenAI.LlmchatChatDataClass? meta) { - this.Dashscope = dashscope; - this.Cohere = cohere; - this.TogetherAi = togetherAi; this.Deepinfra = deepinfra; - this.Iointelligence = iointelligence; - this.Cloudflare = cloudflare; + this.Microsoft = microsoft; + this.Huggingface = huggingface; + this.TogetherAi = togetherAi; this.Google = google; - this.Perplexityai = perplexityai; - this.Replicate = replicate; - this.Meta = meta; - this.Mistral = mistral; - this.Amazon = amazon; + this.Databricks = databricks; + this.Deepseek = deepseek; this.Groq = groq; + this.Amazon = amazon; + this.Ovhcloud = ovhcloud; + this.Tenstorrent = tenstorrent; + this.Dashscope = dashscope; + this.Cloudflare = cloudflare; + this.Nebius = nebius; + this.Iointelligence = iointelligence; + this.Openai = openai; + this.Cerebras = cerebras; this.Bytedance = bytedance; - this.Deepseek = deepseek; + this.Cohere = cohere; + this.Replicate = replicate; this.FireworksAi = fireworksAi; - this.Openai = openai; - this.Databricks = databricks; - this.Nebius = nebius; - this.Microsoft = microsoft; this.Xai = xai; - this.Ovhcloud = ovhcloud; - this.Huggingface = huggingface; - this.Minimax = minimax; - this.Tenstorrent = tenstorrent; this.Anthropic = anthropic; - this.Cerebras = cerebras; + this.Mistral = mistral; + this.Perplexityai = perplexityai; + this.Minimax = minimax; + this.Meta = meta; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.MultimodalchatResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.MultimodalchatResponseModel.g.cs index 4c5eec0..d8b9eb6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.MultimodalchatResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.MultimodalchatResponseModel.g.cs @@ -14,12 +14,6 @@ public sealed partial class MultimodalchatResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("xai")] public global::EdenAI.MultimodalchatChatDataClass? 
Xai { get; set; } - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.MultimodalchatChatDataClass? Amazon { get; set; } - /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -29,8 +23,8 @@ public sealed partial class MultimodalchatResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.MultimodalchatChatDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.MultimodalchatChatDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -50,6 +44,12 @@ public sealed partial class MultimodalchatResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.MultimodalchatChatDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.MultimodalchatChatDataClass? 
Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -62,13 +62,10 @@ public sealed partial class MultimodalchatResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -80,25 +77,28 @@ public sealed partial class MultimodalchatResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public MultimodalchatResponseModel( global::EdenAI.MultimodalchatChatDataClass? xai, - global::EdenAI.MultimodalchatChatDataClass? amazon, global::EdenAI.MultimodalchatChatDataClass? anthropic, - global::EdenAI.MultimodalchatChatDataClass? google, + global::EdenAI.MultimodalchatChatDataClass? amazon, global::EdenAI.MultimodalchatChatDataClass? openai, global::EdenAI.MultimodalchatChatDataClass? mistral, - global::EdenAI.MultimodalchatChatDataClass? microsoft) + global::EdenAI.MultimodalchatChatDataClass? microsoft, + global::EdenAI.MultimodalchatChatDataClass? 
google) { this.Xai = xai; - this.Amazon = amazon; this.Anthropic = anthropic; - this.Google = google; + this.Amazon = amazon; this.Openai = openai; this.Mistral = mistral; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcranonymizationAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcranonymizationAsyncModel.g.cs index 959eeaa..09004c8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcranonymizationAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcranonymizationAsyncModel.g.cs @@ -11,8 +11,8 @@ public sealed partial class OcranonymizationAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("readyredact")] - public global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? Readyredact { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? Base64 { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -23,8 +23,8 @@ public sealed partial class OcranonymizationAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? Base64 { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("readyredact")] + public global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? Readyredact { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -35,26 +35,26 @@ public sealed partial class OcranonymizationAsyncModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcranonymizationAsyncModel( - global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? readyredact, + global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? base64, global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? privateai, - global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? base64) + global::EdenAI.OcranonymizationAsyncAnonymizationAsyncDataClass? readyredact) { - this.Readyredact = readyredact; - this.Privateai = privateai; this.Base64 = base64; + this.Privateai = privateai; + this.Readyredact = readyredact; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrbankCheckParsingResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrbankCheckParsingResponseModel.g.cs index 562d307..bdcf55b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrbankCheckParsingResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrbankCheckParsingResponseModel.g.cs @@ -8,6 +8,12 @@ namespace EdenAI /// public sealed partial class OcrbankCheckParsingResponseModel { + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? 
Base64 { get; set; } + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -26,12 +32,6 @@ public sealed partial class OcrbankCheckParsingResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("extracta")] public global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? Extracta { get; set; } - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? Base64 { get; set; } - /// /// Additional properties that are not explicitly defined in the schema /// @@ -41,6 +41,9 @@ public sealed partial class OcrbankCheckParsingResponseModel /// /// Initializes a new instance of the class. /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -50,22 +53,19 @@ public sealed partial class OcrbankCheckParsingResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcrbankCheckParsingResponseModel( + global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? base64, global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? veryfi, global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? mindee, - global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? extracta, - global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? base64) + global::EdenAI.OcrbankCheckParsingBankCheckParsingDataClass? 
extracta) { + this.Base64 = base64; this.Veryfi = veryfi; this.Mindee = mindee; this.Extracta = extracta; - this.Base64 = base64; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrdataExtractionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrdataExtractionResponseModel.g.cs index 0aae918..c074fb4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrdataExtractionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrdataExtractionResponseModel.g.cs @@ -11,14 +11,14 @@ public sealed partial class OcrdataExtractionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.OcrdataExtractionDataExtractionDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.OcrdataExtractionDataExtractionDataClass? Base64 { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.OcrdataExtractionDataExtractionDataClass? Base64 { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.OcrdataExtractionDataExtractionDataClass? Amazon { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -29,21 +29,21 @@ public sealed partial class OcrdataExtractionResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcrdataExtractionResponseModel( - global::EdenAI.OcrdataExtractionDataExtractionDataClass? amazon, - global::EdenAI.OcrdataExtractionDataExtractionDataClass? base64) + global::EdenAI.OcrdataExtractionDataExtractionDataClass? base64, + global::EdenAI.OcrdataExtractionDataExtractionDataClass? amazon) { - this.Amazon = amazon; this.Base64 = base64; + this.Amazon = amazon; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrfinancialParserResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrfinancialParserResponseModel.g.cs index 3b9fa71..8f73429 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrfinancialParserResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrfinancialParserResponseModel.g.cs @@ -11,32 +11,32 @@ public sealed partial class OcrfinancialParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("veryfi")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Veryfi { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Base64 { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("eagledoc")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Eagledoc { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("klippa")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? 
Klippa { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("mindee")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Mindee { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("dataleon")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Dataleon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("veryfi")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Veryfi { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("tabscanner")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Tabscanner { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("eagledoc")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Eagledoc { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -47,20 +47,20 @@ public sealed partial class OcrfinancialParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("affinda")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Affinda { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("mindee")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? 
Mindee { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Base64 { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("tabscanner")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Tabscanner { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -71,20 +71,20 @@ public sealed partial class OcrfinancialParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("klippa")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Klippa { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("affinda")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? 
Affinda { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("dataleon")] - public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Dataleon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.OcrfinancialParserFinancialParserDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -95,76 +95,76 @@ public sealed partial class OcrfinancialParserResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// 
#if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcrfinancialParserResponseModel( + global::EdenAI.OcrfinancialParserFinancialParserDataClass? base64, + global::EdenAI.OcrfinancialParserFinancialParserDataClass? klippa, + global::EdenAI.OcrfinancialParserFinancialParserDataClass? dataleon, global::EdenAI.OcrfinancialParserFinancialParserDataClass? veryfi, global::EdenAI.OcrfinancialParserFinancialParserDataClass? eagledoc, + global::EdenAI.OcrfinancialParserFinancialParserDataClass? amazon, global::EdenAI.OcrfinancialParserFinancialParserDataClass? mindee, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? microsoft, + global::EdenAI.OcrfinancialParserFinancialParserDataClass? openai, global::EdenAI.OcrfinancialParserFinancialParserDataClass? tabscanner, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? amazon, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? affinda, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? base64, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? google, global::EdenAI.OcrfinancialParserFinancialParserDataClass? extracta, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? openai, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? klippa, - global::EdenAI.OcrfinancialParserFinancialParserDataClass? dataleon) + global::EdenAI.OcrfinancialParserFinancialParserDataClass? microsoft, + global::EdenAI.OcrfinancialParserFinancialParserDataClass? affinda, + global::EdenAI.OcrfinancialParserFinancialParserDataClass? 
google) { + this.Base64 = base64; + this.Klippa = klippa; + this.Dataleon = dataleon; this.Veryfi = veryfi; this.Eagledoc = eagledoc; + this.Amazon = amazon; this.Mindee = mindee; - this.Microsoft = microsoft; + this.Openai = openai; this.Tabscanner = tabscanner; - this.Amazon = amazon; + this.Extracta = extracta; + this.Microsoft = microsoft; this.Affinda = affinda; - this.Base64 = base64; this.Google = google; - this.Extracta = extracta; - this.Openai = openai; - this.Klippa = klippa; - this.Dataleon = dataleon; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcridentityParserResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcridentityParserResponseModel.g.cs index 30b7f5c..ec2c2dc 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcridentityParserResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcridentityParserResponseModel.g.cs @@ -11,8 +11,8 @@ public sealed partial class OcridentityParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("mindee")] - public global::EdenAI.OcridentityParserIdentityParserDataClass? Mindee { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.OcridentityParserIdentityParserDataClass? Base64 { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -23,14 +23,8 @@ public sealed partial class OcridentityParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("affinda")] - public global::EdenAI.OcridentityParserIdentityParserDataClass? 
Affinda { get; set; } - - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.OcridentityParserIdentityParserDataClass? Base64 { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("mindee")] + public global::EdenAI.OcridentityParserIdentityParserDataClass? Mindee { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -50,6 +44,12 @@ public sealed partial class OcridentityParserResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.OcridentityParserIdentityParserDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("affinda")] + public global::EdenAI.OcridentityParserIdentityParserDataClass? Affinda { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -59,16 +59,13 @@ public sealed partial class OcridentityParserResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -80,25 +77,28 @@ public sealed partial class OcridentityParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcridentityParserResponseModel( - global::EdenAI.OcridentityParserIdentityParserDataClass? mindee, - global::EdenAI.OcridentityParserIdentityParserDataClass? amazon, - global::EdenAI.OcridentityParserIdentityParserDataClass? affinda, global::EdenAI.OcridentityParserIdentityParserDataClass? base64, + global::EdenAI.OcridentityParserIdentityParserDataClass? amazon, + global::EdenAI.OcridentityParserIdentityParserDataClass? mindee, global::EdenAI.OcridentityParserIdentityParserDataClass? openai, global::EdenAI.OcridentityParserIdentityParserDataClass? klippa, - global::EdenAI.OcridentityParserIdentityParserDataClass? microsoft) + global::EdenAI.OcridentityParserIdentityParserDataClass? microsoft, + global::EdenAI.OcridentityParserIdentityParserDataClass? 
affinda) { - this.Mindee = mindee; - this.Amazon = amazon; - this.Affinda = affinda; this.Base64 = base64; + this.Amazon = amazon; + this.Mindee = mindee; this.Openai = openai; this.Klippa = klippa; this.Microsoft = microsoft; + this.Affinda = affinda; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrAsyncModel.g.cs index b59192f..337d5ec 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrAsyncModel.g.cs @@ -14,12 +14,6 @@ public sealed partial class OcrocrAsyncModel [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] public global::EdenAI.OcrocrAsyncOcrAsyncDataClass? Amazon { get; set; } - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.OcrocrAsyncOcrAsyncDataClass? Google { get; set; } - /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -38,6 +32,12 @@ public sealed partial class OcrocrAsyncModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.OcrocrAsyncOcrAsyncDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.OcrocrAsyncOcrAsyncDataClass? 
Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -50,9 +50,6 @@ public sealed partial class OcrocrAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -62,21 +59,24 @@ public sealed partial class OcrocrAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcrocrAsyncModel( global::EdenAI.OcrocrAsyncOcrAsyncDataClass? amazon, - global::EdenAI.OcrocrAsyncOcrAsyncDataClass? google, global::EdenAI.OcrocrAsyncOcrAsyncDataClass? oneai, global::EdenAI.OcrocrAsyncOcrAsyncDataClass? mistral, - global::EdenAI.OcrocrAsyncOcrAsyncDataClass? microsoft) + global::EdenAI.OcrocrAsyncOcrAsyncDataClass? microsoft, + global::EdenAI.OcrocrAsyncOcrAsyncDataClass? google) { this.Amazon = amazon; - this.Google = google; this.Oneai = oneai; this.Mistral = mistral; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrResponseModel.g.cs index bf9b7aa..0c03f1f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrResponseModel.g.cs @@ -11,50 +11,50 @@ public sealed partial class OcrocrResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] - public global::EdenAI.OcrocrOcrDataClass? 
Api4ai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("base64")] + public global::EdenAI.OcrocrOcrDataClass? Base64 { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.OcrocrOcrDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] + public global::EdenAI.OcrocrOcrDataClass? Sentisight { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] - public global::EdenAI.OcrocrOcrDataClass? Clarifai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("api4ai")] + public global::EdenAI.OcrocrOcrDataClass? Api4ai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("base64")] - public global::EdenAI.OcrocrOcrDataClass? Base64 { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.OcrocrOcrDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.OcrocrOcrDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] + public global::EdenAI.OcrocrOcrDataClass? Mistral { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sentisight")] - public global::EdenAI.OcrocrOcrDataClass? Sentisight { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.OcrocrOcrDataClass? 
Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] - public global::EdenAI.OcrocrOcrDataClass? Mistral { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("clarifai")] + public global::EdenAI.OcrocrOcrDataClass? Clarifai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.OcrocrOcrDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.OcrocrOcrDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -65,51 +65,51 @@ public sealed partial class OcrocrResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcrocrResponseModel( - global::EdenAI.OcrocrOcrDataClass? 
api4ai, - global::EdenAI.OcrocrOcrDataClass? amazon, - global::EdenAI.OcrocrOcrDataClass? clarifai, global::EdenAI.OcrocrOcrDataClass? base64, - global::EdenAI.OcrocrOcrDataClass? google, global::EdenAI.OcrocrOcrDataClass? sentisight, + global::EdenAI.OcrocrOcrDataClass? api4ai, + global::EdenAI.OcrocrOcrDataClass? amazon, global::EdenAI.OcrocrOcrDataClass? mistral, - global::EdenAI.OcrocrOcrDataClass? microsoft) + global::EdenAI.OcrocrOcrDataClass? microsoft, + global::EdenAI.OcrocrOcrDataClass? clarifai, + global::EdenAI.OcrocrOcrDataClass? google) { - this.Api4ai = api4ai; - this.Amazon = amazon; - this.Clarifai = clarifai; this.Base64 = base64; - this.Google = google; this.Sentisight = sentisight; + this.Api4ai = api4ai; + this.Amazon = amazon; this.Mistral = mistral; this.Microsoft = microsoft; + this.Clarifai = clarifai; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrTablesAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrTablesAsyncModel.g.cs index 6ed8b77..adf51c4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrTablesAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrocrTablesAsyncModel.g.cs @@ -17,14 +17,14 @@ public sealed partial class OcrocrTablesAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? 
Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -38,10 +38,10 @@ public sealed partial class OcrocrTablesAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -49,12 +49,12 @@ public sealed partial class OcrocrTablesAsyncModel #endif public OcrocrTablesAsyncModel( global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? amazon, - global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? google, - global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? microsoft) + global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? microsoft, + global::EdenAI.OcrocrTablesAsyncOcrTablesAsyncDataClass? google) { this.Amazon = amazon; - this.Google = google; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.OcrresumeParserResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.OcrresumeParserResponseModel.g.cs index 6befa64..041ff18 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.OcrresumeParserResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.OcrresumeParserResponseModel.g.cs @@ -11,38 +11,38 @@ public sealed partial class OcrresumeParserResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("hireability")] - public global::EdenAI.OcrresumeParserResumeParserDataClass? 
Hireability { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("senseloaf")] + public global::EdenAI.OcrresumeParserResumeParserDataClass? Senseloaf { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("affinda")] - public global::EdenAI.OcrresumeParserResumeParserDataClass? Affinda { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("hireability")] + public global::EdenAI.OcrresumeParserResumeParserDataClass? Hireability { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("extracta")] - public global::EdenAI.OcrresumeParserResumeParserDataClass? Extracta { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.OcrresumeParserResumeParserDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("senseloaf")] - public global::EdenAI.OcrresumeParserResumeParserDataClass? Senseloaf { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("extracta")] + public global::EdenAI.OcrresumeParserResumeParserDataClass? Extracta { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.OcrresumeParserResumeParserDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("klippa")] + public global::EdenAI.OcrresumeParserResumeParserDataClass? 
Klippa { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("klippa")] - public global::EdenAI.OcrresumeParserResumeParserDataClass? Klippa { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("affinda")] + public global::EdenAI.OcrresumeParserResumeParserDataClass? Affinda { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -53,41 +53,41 @@ public sealed partial class OcrresumeParserResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public OcrresumeParserResponseModel( - global::EdenAI.OcrresumeParserResumeParserDataClass? hireability, - global::EdenAI.OcrresumeParserResumeParserDataClass? affinda, - global::EdenAI.OcrresumeParserResumeParserDataClass? extracta, global::EdenAI.OcrresumeParserResumeParserDataClass? senseloaf, + global::EdenAI.OcrresumeParserResumeParserDataClass? hireability, global::EdenAI.OcrresumeParserResumeParserDataClass? openai, - global::EdenAI.OcrresumeParserResumeParserDataClass? klippa) + global::EdenAI.OcrresumeParserResumeParserDataClass? extracta, + global::EdenAI.OcrresumeParserResumeParserDataClass? 
klippa, + global::EdenAI.OcrresumeParserResumeParserDataClass? affinda) { - this.Hireability = hireability; - this.Affinda = affinda; - this.Extracta = extracta; this.Senseloaf = senseloaf; + this.Hireability = hireability; this.Openai = openai; + this.Extracta = extracta; this.Klippa = klippa; + this.Affinda = affinda; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextanonymizationResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextanonymizationResponseModel.g.cs index 331ebc0..a0a13b2 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextanonymizationResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextanonymizationResponseModel.g.cs @@ -11,14 +11,8 @@ public sealed partial class TextanonymizationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.TextanonymizationAnonymizationDataClass? Xai { get; set; } - - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.TextanonymizationAnonymizationDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("privateai")] + public global::EdenAI.TextanonymizationAnonymizationDataClass? Privateai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -29,14 +23,14 @@ public sealed partial class TextanonymizationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("privateai")] - public global::EdenAI.TextanonymizationAnonymizationDataClass? Privateai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.TextanonymizationAnonymizationDataClass? 
Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] - public global::EdenAI.TextanonymizationAnonymizationDataClass? Oneai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.TextanonymizationAnonymizationDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -44,6 +38,12 @@ public sealed partial class TextanonymizationResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("openai")] public global::EdenAI.TextanonymizationAnonymizationDataClass? Openai { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] + public global::EdenAI.TextanonymizationAnonymizationDataClass? Oneai { get; set; } + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -59,24 +59,24 @@ public sealed partial class TextanonymizationResponseModel /// /// Initializes a new instance of the class. 
/// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -84,20 +84,20 @@ public sealed partial class TextanonymizationResponseModel [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextanonymizationResponseModel( + global::EdenAI.TextanonymizationAnonymizationDataClass? privateai, + global::EdenAI.TextanonymizationAnonymizationDataClass? emvista, global::EdenAI.TextanonymizationAnonymizationDataClass? xai, global::EdenAI.TextanonymizationAnonymizationDataClass? amazon, - global::EdenAI.TextanonymizationAnonymizationDataClass? emvista, - global::EdenAI.TextanonymizationAnonymizationDataClass? privateai, - global::EdenAI.TextanonymizationAnonymizationDataClass? oneai, global::EdenAI.TextanonymizationAnonymizationDataClass? openai, + global::EdenAI.TextanonymizationAnonymizationDataClass? oneai, global::EdenAI.TextanonymizationAnonymizationDataClass? 
microsoft) { + this.Privateai = privateai; + this.Emvista = emvista; this.Xai = xai; this.Amazon = amazon; - this.Emvista = emvista; - this.Privateai = privateai; - this.Oneai = oneai; this.Openai = openai; + this.Oneai = oneai; this.Microsoft = microsoft; } diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextchatResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextchatResponseModel.g.cs index d8ea6f2..72a55fa 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextchatResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextchatResponseModel.g.cs @@ -11,86 +11,86 @@ public sealed partial class TextchatResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.TextchatChatDataClass? Xai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("deepseek")] + public global::EdenAI.TextchatChatDataClass? Deepseek { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] - public global::EdenAI.TextchatChatDataClass? Cohere { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("groq")] + public global::EdenAI.TextchatChatDataClass? Groq { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("together_ai")] - public global::EdenAI.TextchatChatDataClass? TogetherAi { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("anthropic")] + public global::EdenAI.TextchatChatDataClass? Anthropic { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("meta")] - public global::EdenAI.TextchatChatDataClass? 
Meta { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.TextchatChatDataClass? Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.TextchatChatDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("perplexityai")] + public global::EdenAI.TextchatChatDataClass? Perplexityai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("groq")] - public global::EdenAI.TextchatChatDataClass? Groq { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.TextchatChatDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("anthropic")] - public global::EdenAI.TextchatChatDataClass? Anthropic { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TextchatChatDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("deepseek")] - public global::EdenAI.TextchatChatDataClass? Deepseek { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] + public global::EdenAI.TextchatChatDataClass? Mistral { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TextchatChatDataClass? 
Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("replicate")] + public global::EdenAI.TextchatChatDataClass? Replicate { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("replicate")] - public global::EdenAI.TextchatChatDataClass? Replicate { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] + public global::EdenAI.TextchatChatDataClass? Cohere { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("perplexityai")] - public global::EdenAI.TextchatChatDataClass? Perplexityai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("together_ai")] + public global::EdenAI.TextchatChatDataClass? TogetherAi { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TextchatChatDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.TextchatChatDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] - public global::EdenAI.TextchatChatDataClass? Mistral { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("meta")] + public global::EdenAI.TextchatChatDataClass? Meta { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.TextchatChatDataClass? 
Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TextchatChatDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -101,81 +101,81 @@ public sealed partial class TextchatResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextchatResponseModel( - global::EdenAI.TextchatChatDataClass? 
xai, - global::EdenAI.TextchatChatDataClass? cohere, - global::EdenAI.TextchatChatDataClass? togetherAi, - global::EdenAI.TextchatChatDataClass? meta, - global::EdenAI.TextchatChatDataClass? amazon, + global::EdenAI.TextchatChatDataClass? deepseek, global::EdenAI.TextchatChatDataClass? groq, global::EdenAI.TextchatChatDataClass? anthropic, - global::EdenAI.TextchatChatDataClass? deepseek, - global::EdenAI.TextchatChatDataClass? google, - global::EdenAI.TextchatChatDataClass? replicate, + global::EdenAI.TextchatChatDataClass? xai, global::EdenAI.TextchatChatDataClass? perplexityai, + global::EdenAI.TextchatChatDataClass? amazon, global::EdenAI.TextchatChatDataClass? openai, global::EdenAI.TextchatChatDataClass? mistral, - global::EdenAI.TextchatChatDataClass? microsoft) + global::EdenAI.TextchatChatDataClass? replicate, + global::EdenAI.TextchatChatDataClass? cohere, + global::EdenAI.TextchatChatDataClass? togetherAi, + global::EdenAI.TextchatChatDataClass? microsoft, + global::EdenAI.TextchatChatDataClass? meta, + global::EdenAI.TextchatChatDataClass? 
google) { - this.Xai = xai; - this.Cohere = cohere; - this.TogetherAi = togetherAi; - this.Meta = meta; - this.Amazon = amazon; + this.Deepseek = deepseek; this.Groq = groq; this.Anthropic = anthropic; - this.Deepseek = deepseek; - this.Google = google; - this.Replicate = replicate; + this.Xai = xai; this.Perplexityai = perplexityai; + this.Amazon = amazon; this.Openai = openai; this.Mistral = mistral; + this.Replicate = replicate; + this.Cohere = cohere; + this.TogetherAi = togetherAi; this.Microsoft = microsoft; + this.Meta = meta; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextembeddingsResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextembeddingsResponseModel.g.cs index 587b6da..357a84a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextembeddingsResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextembeddingsResponseModel.g.cs @@ -8,18 +8,6 @@ namespace EdenAI /// public sealed partial class TextembeddingsResponseModel { - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("ai21labs")] - public global::EdenAI.TextembeddingsEmbeddingsDataClass? Ai21labs { get; set; } - - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] - public global::EdenAI.TextembeddingsEmbeddingsDataClass? Cohere { get; set; } - /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -29,8 +17,8 @@ public sealed partial class TextembeddingsResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TextembeddingsEmbeddingsDataClass? 
Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("ai21labs")] + public global::EdenAI.TextembeddingsEmbeddingsDataClass? Ai21labs { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -50,6 +38,18 @@ public sealed partial class TextembeddingsResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("mistral")] public global::EdenAI.TextembeddingsEmbeddingsDataClass? Mistral { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] + public global::EdenAI.TextembeddingsEmbeddingsDataClass? Cohere { get; set; } + + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TextembeddingsEmbeddingsDataClass? Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -59,16 +59,10 @@ public sealed partial class TextembeddingsResponseModel /// /// Initializes a new instance of the class. 
/// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -80,25 +74,31 @@ public sealed partial class TextembeddingsResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextembeddingsResponseModel( - global::EdenAI.TextembeddingsEmbeddingsDataClass? ai21labs, - global::EdenAI.TextembeddingsEmbeddingsDataClass? cohere, global::EdenAI.TextembeddingsEmbeddingsDataClass? iointelligence, - global::EdenAI.TextembeddingsEmbeddingsDataClass? google, + global::EdenAI.TextembeddingsEmbeddingsDataClass? ai21labs, global::EdenAI.TextembeddingsEmbeddingsDataClass? jina, global::EdenAI.TextembeddingsEmbeddingsDataClass? openai, - global::EdenAI.TextembeddingsEmbeddingsDataClass? mistral) + global::EdenAI.TextembeddingsEmbeddingsDataClass? mistral, + global::EdenAI.TextembeddingsEmbeddingsDataClass? cohere, + global::EdenAI.TextembeddingsEmbeddingsDataClass? 
google) { - this.Ai21labs = ai21labs; - this.Cohere = cohere; this.Iointelligence = iointelligence; - this.Google = google; + this.Ai21labs = ai21labs; this.Jina = jina; this.Openai = openai; this.Mistral = mistral; + this.Cohere = cohere; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextkeywordExtractionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextkeywordExtractionResponseModel.g.cs index 39c577e..4b5e5e1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextkeywordExtractionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextkeywordExtractionResponseModel.g.cs @@ -11,44 +11,44 @@ public sealed partial class TextkeywordExtractionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Xai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("corticalio")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Corticalio { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("emvista")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Emvista { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Tenstorrent { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? 
Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("emvista")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Emvista { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Oneai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("corticalio")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Corticalio { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Oneai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] + public global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? Tenstorrent { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -65,25 +65,25 @@ public sealed partial class TextkeywordExtractionResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -93,22 +93,22 @@ public sealed partial class TextkeywordExtractionResponseModel [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextkeywordExtractionResponseModel( + global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? corticalio, + global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? emvista, global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? xai, global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? amazon, - global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? tenstorrent, - global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? emvista, - global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? oneai, - global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? corticalio, global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? openai, + global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? oneai, + global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? tenstorrent, global::EdenAI.TextkeywordExtractionKeywordExtractionDataClass? 
microsoft) { + this.Corticalio = corticalio; + this.Emvista = emvista; this.Xai = xai; this.Amazon = amazon; - this.Tenstorrent = tenstorrent; - this.Emvista = emvista; - this.Oneai = oneai; - this.Corticalio = corticalio; this.Openai = openai; + this.Oneai = oneai; + this.Tenstorrent = tenstorrent; this.Microsoft = microsoft; } diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextmoderationResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextmoderationResponseModel.g.cs index a36a6b8..76fc338 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextmoderationResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextmoderationResponseModel.g.cs @@ -17,14 +17,14 @@ public sealed partial class TextmoderationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TextmoderationModerationDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.TextmoderationModerationDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.TextmoderationModerationDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TextmoderationModerationDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -38,10 +38,10 @@ public sealed partial class TextmoderationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -49,12 +49,12 @@ public sealed partial class TextmoderationResponseModel #endif public TextmoderationResponseModel( global::EdenAI.TextmoderationModerationDataClass? openai, - global::EdenAI.TextmoderationModerationDataClass? google, - global::EdenAI.TextmoderationModerationDataClass? microsoft) + global::EdenAI.TextmoderationModerationDataClass? microsoft, + global::EdenAI.TextmoderationModerationDataClass? google) { this.Openai = openai; - this.Google = google; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextnamedEntityRecognitionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextnamedEntityRecognitionResponseModel.g.cs index 0787f5a..5acf9dd 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextnamedEntityRecognitionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextnamedEntityRecognitionResponseModel.g.cs @@ -23,14 +23,8 @@ public sealed partial class TextnamedEntityRecognitionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] - public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? 
Tenstorrent { get; set; } - - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -41,8 +35,8 @@ public sealed partial class TextnamedEntityRecognitionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] + public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? Tenstorrent { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -50,6 +44,12 @@ public sealed partial class TextnamedEntityRecognitionResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? 
Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -65,40 +65,40 @@ public sealed partial class TextnamedEntityRecognitionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextnamedEntityRecognitionResponseModel( global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? xai, global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? amazon, - global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? tenstorrent, - global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? google, - global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? oneai, global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? openai, - global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? microsoft) + global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? oneai, + global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? tenstorrent, + global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? microsoft, + global::EdenAI.TextnamedEntityRecognitionNamedEntityRecognitionDataClass? 
google) { this.Xai = xai; this.Amazon = amazon; - this.Tenstorrent = tenstorrent; - this.Google = google; - this.Oneai = oneai; this.Openai = openai; + this.Oneai = oneai; + this.Tenstorrent = tenstorrent; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextsentimentAnalysisResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextsentimentAnalysisResponseModel.g.cs index 18f8a32..bc44574 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextsentimentAnalysisResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextsentimentAnalysisResponseModel.g.cs @@ -11,38 +11,38 @@ public sealed partial class TextsentimentAnalysisResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Xai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("emvista")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Emvista { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Tenstorrent { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("ibm")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? 
Ibm { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("emvista")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Emvista { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sapling")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Sapling { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sapling")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Sapling { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -53,20 +53,20 @@ public sealed partial class TextsentimentAnalysisResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("ibm")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Ibm { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? 
Tenstorrent { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -77,61 +77,61 @@ public sealed partial class TextsentimentAnalysisResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextsentimentAnalysisResponseModel( + global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? emvista, global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? xai, + global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? ibm, global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? amazon, - global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? tenstorrent, - global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? emvista, + global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? openai, global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? sapling, - global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? google, global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? oneai, - global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? 
ibm, - global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? openai, - global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? microsoft) + global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? tenstorrent, + global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? microsoft, + global::EdenAI.TextsentimentAnalysisSentimentAnalysisDataClass? google) { + this.Emvista = emvista; this.Xai = xai; + this.Ibm = ibm; this.Amazon = amazon; - this.Tenstorrent = tenstorrent; - this.Emvista = emvista; + this.Openai = openai; this.Sapling = sapling; - this.Google = google; this.Oneai = oneai; - this.Ibm = ibm; - this.Openai = openai; + this.Tenstorrent = tenstorrent; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextspellCheckResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextspellCheckResponseModel.g.cs index cb2863f..d4407ed 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextspellCheckResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextspellCheckResponseModel.g.cs @@ -17,26 +17,26 @@ public sealed partial class TextspellCheckResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("prowritingaid")] - public global::EdenAI.TextspellCheckSpellCheckDataClass? Prowritingaid { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("sapling")] + public global::EdenAI.TextspellCheckSpellCheckDataClass? Sapling { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] - public global::EdenAI.TextspellCheckSpellCheckDataClass? Cohere { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TextspellCheckSpellCheckDataClass? 
Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("sapling")] - public global::EdenAI.TextspellCheckSpellCheckDataClass? Sapling { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("prowritingaid")] + public global::EdenAI.TextspellCheckSpellCheckDataClass? Prowritingaid { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TextspellCheckSpellCheckDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] + public global::EdenAI.TextspellCheckSpellCheckDataClass? Cohere { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -56,16 +56,16 @@ public sealed partial class TextspellCheckResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// @@ -76,17 +76,17 @@ public sealed partial class TextspellCheckResponseModel #endif public TextspellCheckResponseModel( global::EdenAI.TextspellCheckSpellCheckDataClass? xai, - global::EdenAI.TextspellCheckSpellCheckDataClass? prowritingaid, - global::EdenAI.TextspellCheckSpellCheckDataClass? cohere, global::EdenAI.TextspellCheckSpellCheckDataClass? sapling, global::EdenAI.TextspellCheckSpellCheckDataClass? openai, + global::EdenAI.TextspellCheckSpellCheckDataClass? 
prowritingaid, + global::EdenAI.TextspellCheckSpellCheckDataClass? cohere, global::EdenAI.TextspellCheckSpellCheckDataClass? microsoft) { this.Xai = xai; - this.Prowritingaid = prowritingaid; - this.Cohere = cohere; this.Sapling = sapling; this.Openai = openai; + this.Prowritingaid = prowritingaid; + this.Cohere = cohere; this.Microsoft = microsoft; } diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TextsummarizeResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TextsummarizeResponseModel.g.cs index 79782d5..9ceb46a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TextsummarizeResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TextsummarizeResponseModel.g.cs @@ -11,62 +11,62 @@ public sealed partial class TextsummarizeResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Oneai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("emvista")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Emvista { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Xai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("alephalpha")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Alephalpha { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Cohere { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("writesonic")] + public global::EdenAI.TextsummarizeSummarizeDataClass? 
Writesonic { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("anthropic")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Anthropic { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("anthropic")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Anthropic { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("emvista")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Emvista { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("meaningcloud")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Meaningcloud { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("writesonic")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Writesonic { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("meaningcloud")] - public global::EdenAI.TextsummarizeSummarizeDataClass? 
Meaningcloud { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("oneai")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Oneai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("cohere")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Cohere { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("alephalpha")] - public global::EdenAI.TextsummarizeSummarizeDataClass? Alephalpha { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.TextsummarizeSummarizeDataClass? Microsoft { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -77,61 +77,61 @@ public sealed partial class TextsummarizeResponseModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TextsummarizeResponseModel( - global::EdenAI.TextsummarizeSummarizeDataClass? oneai, - global::EdenAI.TextsummarizeSummarizeDataClass? xai, - global::EdenAI.TextsummarizeSummarizeDataClass? cohere, - global::EdenAI.TextsummarizeSummarizeDataClass? microsoft, - global::EdenAI.TextsummarizeSummarizeDataClass? anthropic, global::EdenAI.TextsummarizeSummarizeDataClass? emvista, + global::EdenAI.TextsummarizeSummarizeDataClass? alephalpha, global::EdenAI.TextsummarizeSummarizeDataClass? writesonic, + global::EdenAI.TextsummarizeSummarizeDataClass? anthropic, + global::EdenAI.TextsummarizeSummarizeDataClass? xai, global::EdenAI.TextsummarizeSummarizeDataClass? meaningcloud, global::EdenAI.TextsummarizeSummarizeDataClass? openai, - global::EdenAI.TextsummarizeSummarizeDataClass? alephalpha) + global::EdenAI.TextsummarizeSummarizeDataClass? 
oneai, + global::EdenAI.TextsummarizeSummarizeDataClass? cohere, + global::EdenAI.TextsummarizeSummarizeDataClass? microsoft) { - this.Oneai = oneai; - this.Xai = xai; - this.Cohere = cohere; - this.Microsoft = microsoft; - this.Anthropic = anthropic; this.Emvista = emvista; + this.Alephalpha = alephalpha; this.Writesonic = writesonic; + this.Anthropic = anthropic; + this.Xai = xai; this.Meaningcloud = meaningcloud; this.Openai = openai; - this.Alephalpha = alephalpha; + this.Oneai = oneai; + this.Cohere = cohere; + this.Microsoft = microsoft; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TexttopicExtractionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TexttopicExtractionResponseModel.g.cs index 7c8759a..9fc3aeb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TexttopicExtractionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TexttopicExtractionResponseModel.g.cs @@ -8,6 +8,12 @@ namespace EdenAI /// public sealed partial class TexttopicExtractionResponseModel { + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] + public global::EdenAI.TexttopicExtractionTopicExtractionDataClass? Tenstorrent { get; set; } + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -26,12 +32,6 @@ public sealed partial class TexttopicExtractionResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("google")] public global::EdenAI.TexttopicExtractionTopicExtractionDataClass? Google { get; set; } - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("tenstorrent")] - public global::EdenAI.TexttopicExtractionTopicExtractionDataClass? 
Tenstorrent { get; set; } - /// /// Additional properties that are not explicitly defined in the schema /// @@ -41,6 +41,9 @@ public sealed partial class TexttopicExtractionResponseModel /// /// Initializes a new instance of the class. /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// @@ -50,22 +53,19 @@ public sealed partial class TexttopicExtractionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TexttopicExtractionResponseModel( + global::EdenAI.TexttopicExtractionTopicExtractionDataClass? tenstorrent, global::EdenAI.TexttopicExtractionTopicExtractionDataClass? openai, global::EdenAI.TexttopicExtractionTopicExtractionDataClass? xai, - global::EdenAI.TexttopicExtractionTopicExtractionDataClass? google, - global::EdenAI.TexttopicExtractionTopicExtractionDataClass? tenstorrent) + global::EdenAI.TexttopicExtractionTopicExtractionDataClass? 
google) { + this.Tenstorrent = tenstorrent; this.Openai = openai; this.Xai = xai; this.Google = google; - this.Tenstorrent = tenstorrent; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TranslationautomaticTranslationResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TranslationautomaticTranslationResponseModel.g.cs index 9d9f7bd..52eda76 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TranslationautomaticTranslationResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TranslationautomaticTranslationResponseModel.g.cs @@ -11,44 +11,44 @@ public sealed partial class TranslationautomaticTranslationResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("xai")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Xai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("deepl")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Deepl { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Amazon { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("xai")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Xai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("modernmt")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Modernmt { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("amazon")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? 
Amazon { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("deepl")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Deepl { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("modernmt")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Modernmt { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -59,46 +59,46 @@ public sealed partial class TranslationautomaticTranslationResponseModel /// /// Initializes a new instance of the class. /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TranslationautomaticTranslationResponseModel( + global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? deepl, global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? xai, global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? amazon, - global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? modernmt, - global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? deepl, - global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? google, global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? openai, - global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? microsoft) + global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? modernmt, + global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? 
microsoft, + global::EdenAI.TranslationautomaticTranslationAutomaticTranslationDataClass? google) { + this.Deepl = deepl; this.Xai = xai; this.Amazon = amazon; - this.Modernmt = modernmt; - this.Deepl = deepl; - this.Google = google; this.Openai = openai; + this.Modernmt = modernmt; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.TranslationlanguageDetectionResponseModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.TranslationlanguageDetectionResponseModel.g.cs index c59ad58..714b732 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.TranslationlanguageDetectionResponseModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.TranslationlanguageDetectionResponseModel.g.cs @@ -23,14 +23,8 @@ public sealed partial class TranslationlanguageDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("modernmt")] - public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Modernmt { get; set; } - - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -41,8 +35,8 @@ public sealed partial class TranslationlanguageDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? 
Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("modernmt")] + public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Modernmt { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -50,6 +44,12 @@ public sealed partial class TranslationlanguageDetectionResponseModel [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Microsoft { get; set; } + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? Google { get; set; } + /// /// Additional properties that are not explicitly defined in the schema /// @@ -65,40 +65,40 @@ public sealed partial class TranslationlanguageDetectionResponseModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// - /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 - /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// + /// + /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 + /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public TranslationlanguageDetectionResponseModel( global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? xai, global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? 
amazon, - global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? modernmt, - global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? google, - global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? oneai, global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? openai, - global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? microsoft) + global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? oneai, + global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? modernmt, + global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? microsoft, + global::EdenAI.TranslationlanguageDetectionLanguageDetectionDataClass? google) { this.Xai = xai; this.Amazon = amazon; - this.Modernmt = modernmt; - this.Google = google; - this.Oneai = oneai; this.Openai = openai; + this.Oneai = oneai; + this.Modernmt = modernmt; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.VideogenerationAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.VideogenerationAsyncModel.g.cs index e1909ce..d1ca7d7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.VideogenerationAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.VideogenerationAsyncModel.g.cs @@ -17,8 +17,8 @@ public sealed partial class VideogenerationAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("minimax")] - public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Minimax { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("openai")] + public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? 
Openai { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 @@ -29,20 +29,20 @@ public sealed partial class VideogenerationAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("minimax")] + public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Minimax { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("openai")] - public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Openai { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] + public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Microsoft { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("microsoft")] - public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? Microsoft { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? 
Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -56,19 +56,19 @@ public sealed partial class VideogenerationAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -76,18 +76,18 @@ public sealed partial class VideogenerationAsyncModel #endif public VideogenerationAsyncModel( global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? amazon, - global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? minimax, - global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? bytedance, - global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? google, global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? openai, - global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? microsoft) + global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? bytedance, + global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? minimax, + global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? microsoft, + global::EdenAI.VideogenerationAsyncGenerationAsyncDataClass? 
google) { this.Amazon = amazon; - this.Minimax = minimax; - this.Bytedance = bytedance; - this.Google = google; this.Openai = openai; + this.Bytedance = bytedance; + this.Minimax = minimax; this.Microsoft = microsoft; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.VideologoDetectionAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.VideologoDetectionAsyncModel.g.cs index 17645e7..0a28d8d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.VideologoDetectionAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.VideologoDetectionAsyncModel.g.cs @@ -11,14 +11,14 @@ public sealed partial class VideologoDetectionAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("twelvelabs")] + public global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? Twelvelabs { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("twelvelabs")] - public global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? Twelvelabs { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -29,21 +29,21 @@ public sealed partial class VideologoDetectionAsyncModel /// /// Initializes a new instance of the class. 
/// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER [global::System.Diagnostics.CodeAnalysis.SetsRequiredMembers] #endif public VideologoDetectionAsyncModel( - global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? google, - global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? twelvelabs) + global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? twelvelabs, + global::EdenAI.VideologoDetectionAsyncLogoDetectionAsyncDataClass? google) { - this.Google = google; this.Twelvelabs = twelvelabs; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Models.VideotextDetectionAsyncModel.g.cs b/src/libs/EdenAI/Generated/EdenAI.Models.VideotextDetectionAsyncModel.g.cs index ed4b418..b70e872 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Models.VideotextDetectionAsyncModel.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Models.VideotextDetectionAsyncModel.g.cs @@ -17,14 +17,14 @@ public sealed partial class VideotextDetectionAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("google")] - public global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? Google { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("twelvelabs")] + public global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? Twelvelabs { get; set; } /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - [global::System.Text.Json.Serialization.JsonPropertyName("twelvelabs")] - public global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? 
Twelvelabs { get; set; } + [global::System.Text.Json.Serialization.JsonPropertyName("google")] + public global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? Google { get; set; } /// /// Additional properties that are not explicitly defined in the schema @@ -38,10 +38,10 @@ public sealed partial class VideotextDetectionAsyncModel /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// - /// + /// /// Default Value: openapi-json-null-sentinel-value-2BF93600-0FE4-4250-987A-E5DDB203E464 /// #if NET7_0_OR_GREATER @@ -49,12 +49,12 @@ public sealed partial class VideotextDetectionAsyncModel #endif public VideotextDetectionAsyncModel( global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? amazon, - global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? google, - global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? twelvelabs) + global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? twelvelabs, + global::EdenAI.VideotextDetectionAsyncTextDetectionAsyncDataClass? 
google) { this.Amazon = amazon; - this.Google = google; this.Twelvelabs = twelvelabs; + this.Google = google; } /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ModerationClient.TextTextModerationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ModerationClient.TextTextModerationCreate.g.cs index 7097485..d39ddbe 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ModerationClient.TextTextModerationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ModerationClient.TextTextModerationCreate.g.cs @@ -188,7 +188,7 @@ partial void ProcessTextTextModerationCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -225,13 +225,13 @@ partial void ProcessTextTextModerationCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -263,13 +263,13 @@ partial void ProcessTextTextModerationCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -301,13 +301,13 @@ partial void ProcessTextTextModerationCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -339,13 +339,13 @@ partial void ProcessTextTextModerationCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -389,7 +389,7 @@ partial void ProcessTextTextModerationCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.TextmoderationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextmoderationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -419,7 +419,7 @@ partial void ProcessTextTextModerationCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextmoderationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextmoderationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ModerationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ModerationClient.g.cs index 2e6ee10..a29dce7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ModerationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ModerationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ModerationClient : global::EdenAI.IModerationClient, /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.TextTextNamedEntityRecognitionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.TextTextNamedEntityRecognitionCreate.g.cs index 4359cd9..ba7e2cb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.TextTextNamedEntityRecognitionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.TextTextNamedEntityRecognitionCreate.g.cs 
@@ -116,7 +116,7 @@ partial void ProcessTextTextNamedEntityRecognitionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -153,13 +153,13 @@ partial void ProcessTextTextNamedEntityRecognitionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -191,13 +191,13 @@ partial void ProcessTextTextNamedEntityRecognitionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -229,13 +229,13 @@ partial void 
ProcessTextTextNamedEntityRecognitionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -267,13 +267,13 @@ partial void ProcessTextTextNamedEntityRecognitionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -317,7 +317,7 @@ partial void ProcessTextTextNamedEntityRecognitionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextnamedEntityRecognitionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextnamedEntityRecognitionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -347,7 +347,7 @@ partial void ProcessTextTextNamedEntityRecognitionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextnamedEntityRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextnamedEntityRecognitionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.g.cs index a81c181..f053b82 100644 --- a/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.NamedEntityRecognitionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class NamedEntityRecognitionClient : global::EdenAI.INamed /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.ImageImageObjectDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.ImageImageObjectDetectionCreate.g.cs index 9290665..7e934bc 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.ImageImageObjectDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.ImageImageObjectDetectionCreate.g.cs @@ -84,7 +84,7 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, 
__authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -121,13 +121,13 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -159,13 +159,13 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -197,13 +197,13 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -235,13 +235,13 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -285,7 +285,7 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImageobjectDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImageobjectDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -315,7 +315,7 @@ partial void ProcessImageImageObjectDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImageobjectDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ImageobjectDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.g.cs index 2a7bbef..e500f61 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ObjectDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ObjectDetectionClient : global::EdenAI.IObjectDetect /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncCreate.g.cs index b3baa3a..7da5899 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncCreate.g.cs @@ -189,7 +189,7 @@ partial void ProcessVideoVideoObjectTrackingAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -219,7 +219,7 @@ partial void ProcessVideoVideoObjectTrackingAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve.g.cs index 8fbe13e..85bc22f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve2.g.cs index 72be17e..7c04bb3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.VideoVideoObjectTrackingAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideoobjectTrackingAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideoobjectTrackingAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoObjectTrackingAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideoobjectTrackingAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideoobjectTrackingAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.g.cs index 1728f81..35553a0 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ObjectTrackingAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ObjectTrackingAsyncClient : global::EdenAI.IObjectTr /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncCreate.g.cs index 58d9e3e..7527c27 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncCreate.g.cs @@ -198,7 +198,7 @@ partial void ProcessOcrOcrOcrAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -228,7 +228,7 @@ partial void ProcessOcrOcrOcrAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve.g.cs index 042f932..499c508 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessOcrOcrOcrAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessOcrOcrOcrAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve2.g.cs index 2ca6a49..0a45755 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.OcrOcrOcrAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessOcrOcrOcrAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessOcrOcrOcrAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void 
ProcessOcrOcrOcrAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessOcrOcrOcrAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessOcrOcrOcrAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncocrocrAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncocrocrAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessOcrOcrOcrAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncocrocrAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncocrocrAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.g.cs index eeee848..d07162a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class OcrAsyncClient : global::EdenAI.IOcrAsyncClient, glo /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrClient.OcrOcrOcrCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrClient.OcrOcrOcrCreate.g.cs index 23cb7ff..4a4993d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrClient.OcrOcrOcrCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrClient.OcrOcrOcrCreate.g.cs @@ -299,7 +299,7 @@ partial void ProcessOcrOcrOcrCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new 
global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -336,13 +336,13 @@ partial void ProcessOcrOcrOcrCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -374,13 +374,13 @@ partial void ProcessOcrOcrOcrCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -412,13 +412,13 @@ partial void ProcessOcrOcrOcrCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -450,13 +450,13 @@ partial void ProcessOcrOcrOcrCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -500,7 +500,7 @@ partial void ProcessOcrOcrOcrCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.OcrocrResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.OcrocrResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -530,7 +530,7 @@ partial void ProcessOcrOcrOcrCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.OcrocrResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.OcrocrResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrClient.g.cs index 5ad86d8..3c093aa 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class OcrClient : global::EdenAI.IOcrClient, global::Syste /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncCreate.g.cs index 166677f..60be7f1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncCreate.g.cs @@ -387,7 +387,7 @@ partial void ProcessOcrOcrOcrTablesAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -417,7 +417,7 @@ partial void ProcessOcrOcrOcrTablesAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve.g.cs index 00bcf0d..7716357 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve2.g.cs index c12f795..31bbbe3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.OcrOcrOcrTablesAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 
@@ partial void ProcessOcrOcrOcrTablesAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncocrocrTablesAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncocrocrTablesAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessOcrOcrOcrTablesAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncocrocrTablesAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncocrocrTablesAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.g.cs index 68ebf82..7bdfe7f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.OcrTablesAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class OcrTablesAsyncClient : global::EdenAI.IOcrTablesAsyn /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncCreate.g.cs index a0d51bf..024e803 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncCreate.g.cs @@ -190,7 +190,7 @@ partial void ProcessVideoVideoPersonTrackingAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -220,7 +220,7 @@ partial void ProcessVideoVideoPersonTrackingAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve.g.cs index 7570da0..81e9672 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve2.g.cs index 3693f40..2413b9b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.VideoVideoPersonTrackingAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideopersonTrackingAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.AsyncvideopersonTrackingAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoPersonTrackingAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideopersonTrackingAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideopersonTrackingAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.g.cs index e2e614f..fbddd89 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PersonTrackingAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class PersonTrackingAsyncClient : global::EdenAI.IPersonTr /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.TextTextPlagiaDetectionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.TextTextPlagiaDetectionCreate.g.cs index 0c75c23..2d9eb75 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.TextTextPlagiaDetectionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.TextTextPlagiaDetectionCreate.g.cs @@ -72,7 +72,7 @@ partial void 
ProcessTextTextPlagiaDetectionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -109,13 +109,13 @@ partial void ProcessTextTextPlagiaDetectionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -147,13 +147,13 @@ partial void ProcessTextTextPlagiaDetectionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -185,13 +185,13 @@ partial void ProcessTextTextPlagiaDetectionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -223,13 +223,13 @@ partial void ProcessTextTextPlagiaDetectionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,7 +273,7 @@ partial void ProcessTextTextPlagiaDetectionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextplagiaDetectionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextplagiaDetectionResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -303,7 +303,7 @@ partial void ProcessTextTextPlagiaDetectionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextplagiaDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextplagiaDetectionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.g.cs index 5cd5d41..5baf5fb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PlagiaDetectionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class PlagiaDetectionClient : global::EdenAI.IPlagiaDetect /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.Polyfills.g.cs b/src/libs/EdenAI/Generated/EdenAI.Polyfills.g.cs index 9d6a4c1..7bc30ad 100644 --- a/src/libs/EdenAI/Generated/EdenAI.Polyfills.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.Polyfills.g.cs @@ -53,6 +53,10 @@ public static partial class AutoSdkPolyfills /// /// Creates a JSON request content instance. /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. 
Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif public static global::System.Net.Http.HttpContent CreateJsonContent( T inputValue, string mediaType, @@ -124,6 +128,10 @@ public static partial class AutoSdkPolyfills /// /// Reads JSON content into the specified type using serializer options. /// +#if NET8_0_OR_GREATER + [global::System.Diagnostics.CodeAnalysis.RequiresUnreferencedCode("JSON serialization and deserialization might require types that cannot be statically analyzed. Use the overload that takes a JsonTypeInfo or JsonSerializerContext, or make sure all of the required types are preserved.")] + [global::System.Diagnostics.CodeAnalysis.RequiresDynamicCode("JSON serialization and deserialization might require types that cannot be statically analyzed and might need runtime code generation. Use System.Text.Json source generation for native AOT applications.")] +#endif public static async global::System.Threading.Tasks.Task ReadFromJsonAsync( this global::System.Net.Http.HttpContent content, global::System.Text.Json.JsonSerializerOptions? 
jsonSerializerOptions, diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.TextTextPromptOptimizationCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.TextTextPromptOptimizationCreate.g.cs index c667689..9c39b38 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.TextTextPromptOptimizationCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.TextTextPromptOptimizationCreate.g.cs @@ -153,7 +153,7 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -190,13 +190,13 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -228,13 +228,13 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -266,13 +266,13 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -304,13 +304,13 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -354,7 +354,7 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.TextpromptOptimizationResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextpromptOptimizationResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -384,7 +384,7 @@ partial void ProcessTextTextPromptOptimizationCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextpromptOptimizationResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextpromptOptimizationResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.g.cs index 4e9c239..f5bd230 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptOptimizationClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class PromptOptimizationClient : global::EdenAI.IPromptOpt /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate.g.cs index d95c58d..dbfa239 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate.g.cs @@ -67,7 +67,7 @@ partial void 
ProcessPromptsPromptsCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -116,7 +116,7 @@ partial void ProcessPromptsPromptsCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptCreate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptCreate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -146,7 +146,7 @@ partial void ProcessPromptsPromptsCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptCreate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptCreate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate2.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate2.g.cs index 96f0861..993d61f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsCreate2.g.cs @@ -73,7 +73,7 @@ partial void ProcessPromptsPromptsCreate2ResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -123,7 +123,7 @@ partial void ProcessPromptsPromptsCreate2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -153,7 +153,7 @@ partial void ProcessPromptsPromptsCreate2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryCreate.g.cs index b8d745b..864b02c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessPromptsPromptsHistoryCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessPromptsPromptsHistoryCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessPromptsPromptsHistoryCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryList.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryList.g.cs index 839db4b..d784ce7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryList.g.cs @@ -123,7 +123,7 @@ partial void ProcessPromptsPromptsHistoryListResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PaginatedPromptHistoryList.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PaginatedPromptHistoryList.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -153,7 +153,7 @@ partial void ProcessPromptsPromptsHistoryListResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PaginatedPromptHistoryList.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PaginatedPromptHistoryList.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryPartialUpdate.g.cs index a851f05..105b237 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryPartialUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessPromptsPromptsHistoryPartialUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -128,7 +128,7 @@ partial void ProcessPromptsPromptsHistoryPartialUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -158,7 +158,7 @@ partial void ProcessPromptsPromptsHistoryPartialUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryRetrieve.g.cs index 4e407ef..a0dc3e0 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryRetrieve.g.cs @@ -113,7 +113,7 @@ partial void ProcessPromptsPromptsHistoryRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -143,7 +143,7 @@ partial void ProcessPromptsPromptsHistoryRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryUpdate.g.cs index fd57bce..8bbea6b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsHistoryUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessPromptsPromptsHistoryUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -128,7 +128,7 @@ partial void ProcessPromptsPromptsHistoryUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptHistory.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -158,7 +158,7 @@ partial void ProcessPromptsPromptsHistoryUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptHistory.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsList.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsList.g.cs index fa17657..59a6ac9 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsList.g.cs @@ -117,7 +117,7 @@ partial void ProcessPromptsPromptsListResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PaginatedPromptCreateList.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PaginatedPromptCreateList.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -147,7 +147,7 @@ partial void ProcessPromptsPromptsListResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PaginatedPromptCreateList.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PaginatedPromptCreateList.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsPartialUpdate.g.cs index fe5734c..a6bd107 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsPartialUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessPromptsPromptsPartialUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessPromptsPromptsPartialUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessPromptsPromptsPartialUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsRetrieve.g.cs index ad668d3..6dbdb60 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsRetrieve.g.cs @@ -107,7 +107,7 @@ partial void ProcessPromptsPromptsRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -137,7 +137,7 @@ partial void ProcessPromptsPromptsRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsUpdate.g.cs index 4a4879e..9837ff1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.PromptsPromptsUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessPromptsPromptsUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessPromptsPromptsUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.PromptUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessPromptsPromptsUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.PromptUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.g.cs index a0a9c84..3e50ec5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.PromptsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.PromptsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class PromptsClient : global::EdenAI.IPromptsClient, globa /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncCreate.g.cs index a7922fe..4fdd9a1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncCreate.g.cs @@ -223,7 +223,7 @@ partial void ProcessVideoVideoQuestionAnswerAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -253,7 +253,7 @@ partial void ProcessVideoVideoQuestionAnswerAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve.g.cs index e41d995..971222c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve2.g.cs index 9fe3161..a098305 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.VideoVideoQuestionAnswerAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideoquestionAnswerAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideoquestionAnswerAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoQuestionAnswerAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideoquestionAnswerAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideoquestionAnswerAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.g.cs index 3cf8604..2f32554 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class QuestionAnswerAsyncClient : global::EdenAI.IQuestion /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.ImageImageQuestionAnswerCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.ImageImageQuestionAnswerCreate.g.cs index 72cd7fd..a01d7e4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.ImageImageQuestionAnswerCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.ImageImageQuestionAnswerCreate.g.cs @@ -99,7 +99,7 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); 
} } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -136,13 +136,13 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -174,13 +174,13 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -212,13 +212,13 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -250,13 +250,13 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -300,7 +300,7 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagequestionAnswerResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagequestionAnswerResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -330,7 +330,7 @@ partial void ProcessImageImageQuestionAnswerCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagequestionAnswerResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ImagequestionAnswerResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.VideoVideoQuestionAnswerCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.VideoVideoQuestionAnswerCreate.g.cs index 5988c45..e189338 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.VideoVideoQuestionAnswerCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.VideoVideoQuestionAnswerCreate.g.cs @@ -92,7 +92,7 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -129,13 +129,13 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -167,13 +167,13 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_500 = 
await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -205,13 +205,13 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -243,13 +243,13 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); 
} } catch (global::System.Exception __ex) @@ -293,7 +293,7 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.VideoquestionAnswerResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.VideoquestionAnswerResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -323,7 +323,7 @@ partial void ProcessVideoVideoQuestionAnswerCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.VideoquestionAnswerResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.VideoquestionAnswerResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.g.cs index 994af09..121cdb9 100644 --- a/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.QuestionAnswerClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class QuestionAnswerClient : global::EdenAI.IQuestionAnswe /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetCreate.g.cs index 4e1fa71..d06daf0 100644 --- 
a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetCreate.g.cs @@ -72,7 +72,7 @@ partial void ProcessResourcesResourcesAssetCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessResourcesResourcesAssetCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AssetCreate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AssetCreate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessResourcesResourcesAssetCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AssetCreate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AssetCreate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetPartialUpdate.g.cs index fc5779c..f39de7c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetPartialUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessResourcesResourcesAssetPartialUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -128,7 +128,7 @@ partial void ProcessResourcesResourcesAssetPartialUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AssetUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AssetUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -158,7 +158,7 @@ partial void ProcessResourcesResourcesAssetPartialUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AssetUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AssetUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetRetrieve.g.cs index 101edf9..4bc823c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetRetrieve.g.cs @@ -113,7 +113,7 @@ partial void ProcessResourcesResourcesAssetRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AssetUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AssetUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -143,7 +143,7 @@ partial void ProcessResourcesResourcesAssetRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AssetUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AssetUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetUpdate.g.cs index 4cc18f4..0c2a9b9 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesAssetUpdate.g.cs @@ -77,7 +77,7 @@ partial void ProcessResourcesResourcesAssetUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -128,7 +128,7 @@ partial void ProcessResourcesResourcesAssetUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AssetUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AssetUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -158,7 +158,7 @@ partial void ProcessResourcesResourcesAssetUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AssetUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AssetUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesCreate.g.cs index 582377c..fbf2986 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesCreate.g.cs @@ -67,7 +67,7 @@ partial void ProcessResourcesResourcesCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -116,7 +116,7 @@ partial void ProcessResourcesResourcesCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ResourceCreate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ResourceCreate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -146,7 +146,7 @@ partial void ProcessResourcesResourcesCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ResourceCreate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ResourceCreate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesList.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesList.g.cs index eff3e15..f3e3b5a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesList.g.cs @@ -101,7 +101,7 @@ partial void ProcessResourcesResourcesListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -131,7 +131,7 @@ partial void ProcessResourcesResourcesListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesPartialUpdate.g.cs index f46c2f6..91caae7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesPartialUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessResourcesResourcesPartialUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessResourcesResourcesPartialUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ResourceUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ResourceUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessResourcesResourcesPartialUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ResourceUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ResourceUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesRetrieve.g.cs index dea8b1e..032426e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesRetrieve.g.cs @@ -107,7 +107,7 @@ partial void ProcessResourcesResourcesRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ResourceUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ResourceUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -137,7 +137,7 @@ partial void ProcessResourcesResourcesRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ResourceUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ResourceUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesUpdate.g.cs index 89c17b9..d01def5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.ResourcesResourcesUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessResourcesResourcesUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessResourcesResourcesUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ResourceUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ResourceUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessResourcesResourcesUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ResourceUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ResourceUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.g.cs index 63cd5c1..f4f9875 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResourcesClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ResourcesClient : global::EdenAI.IResourcesClient, g /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.OcrOcrResumeParserCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.OcrOcrResumeParserCreate.g.cs index 584dd55..330517f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.OcrOcrResumeParserCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.OcrOcrResumeParserCreate.g.cs @@ -202,7 +202,7 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -239,13 +239,13 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = 
global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -277,13 +277,13 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -315,13 +315,13 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -353,13 +353,13 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( if (ReadResponseAsString) { __content_404 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -403,7 +403,7 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.OcrresumeParserResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.OcrresumeParserResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -433,7 +433,7 @@ partial void ProcessOcrOcrResumeParserCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.OcrresumeParserResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.OcrresumeParserResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.g.cs index 76e4990..d51436d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ResumeParserClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ResumeParserClient : global::EdenAI.IResumeParserCli /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchDeleteImageCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchDeleteImageCreate.g.cs index 888abc1..0479964 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchDeleteImageCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchDeleteImageCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + 
__value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( if (ReadResponseAsString) { __content_404 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageSearchDeleteImageCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImageRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImageRetrieve.g.cs index 4b6eabd..d838cc9 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImageRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImageRetrieve.g.cs @@ -165,13 +165,13 @@ partial void ProcessImageImageSearchGetImageRetrieveResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -203,13 +203,13 @@ partial void ProcessImageImageSearchGetImageRetrieveResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -241,13 +241,13 @@ partial void 
ProcessImageImageSearchGetImageRetrieveResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -279,13 +279,13 @@ partial void ProcessImageImageSearchGetImageRetrieveResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -329,7 +329,7 @@ partial void ProcessImageImageSearchGetImageRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -359,7 +359,7 @@ partial void ProcessImageImageSearchGetImageRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImagesRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImagesRetrieve.g.cs index eae44c0..bb3f72a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImagesRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchGetImagesRetrieve.g.cs @@ -158,13 +158,13 @@ partial void ProcessImageImageSearchGetImagesRetrieveResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -196,13 +196,13 @@ partial void ProcessImageImageSearchGetImagesRetrieveResponseContent( if (ReadResponseAsString) { __content_500 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -234,13 +234,13 @@ partial void ProcessImageImageSearchGetImagesRetrieveResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -272,13 +272,13 @@ partial void ProcessImageImageSearchGetImagesRetrieveResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); 
} } catch (global::System.Exception __ex) @@ -322,7 +322,7 @@ partial void ProcessImageImageSearchGetImagesRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -352,7 +352,7 @@ partial void ProcessImageImageSearchGetImagesRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchLaunchSimilarityCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchLaunchSimilarityCreate.g.cs index 6451114..2bd1d8e 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchLaunchSimilarityCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchLaunchSimilarityCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( if 
(ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = 
global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageSearchLaunchSimilarityCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchUploadImageCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchUploadImageCreate.g.cs index b38bc2f..92e05d7 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchUploadImageCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SearchClient.ImageImageSearchUploadImageCreate.g.cs @@ -73,7 +73,7 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -110,13 +110,13 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -148,13 +148,13 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -186,13 +186,13 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -224,13 +224,13 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -274,7 +274,7 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( 
__response.EnsureSuccessStatusCode(); return - global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ImagesearchResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -304,7 +304,7 @@ partial void ProcessImageImageSearchUploadImageCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ImagesearchResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SearchClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.SearchClient.g.cs index 94899c3..df0ddb4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SearchClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SearchClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class SearchClient : global::EdenAI.ISearchClient, global: /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.TextTextSentimentAnalysisCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.TextTextSentimentAnalysisCreate.g.cs index b4ccaa0..9bf195f 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.TextTextSentimentAnalysisCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.TextTextSentimentAnalysisCreate.g.cs @@ -284,7 
+284,7 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -321,13 +321,13 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -359,13 +359,13 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -397,13 +397,13 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( if 
(ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -435,13 +435,13 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -485,7 +485,7 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextsentimentAnalysisResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextsentimentAnalysisResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -515,7 +515,7 @@ partial void ProcessTextTextSentimentAnalysisCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextsentimentAnalysisResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextsentimentAnalysisResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.g.cs index 1194dca..b49805c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SentimentAnalysisClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class SentimentAnalysisClient : global::EdenAI.ISentimentA /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncCreate.g.cs index a004824..7a2a626 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncCreate.g.cs @@ -189,7 +189,7 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncCreateResponseContent( 
__response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -219,7 +219,7 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve.g.cs index 181b045..c971fdb 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve2.g.cs index ac7c9e7..2793b63 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.VideoVideoShotChangeDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void 
ProcessVideoVideoShotChangeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = 
global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideoshotChangeDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideoshotChangeDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoShotChangeDetectionAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideoshotChangeDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideoshotChangeDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.g.cs index 1a6f8c3..4a9f5f2 100644 --- a/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.ShotChangeDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class ShotChangeDetectionAsyncClient : global::EdenAI.ISho /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncCreate.g.cs index c95b573..adcaa50 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncCreate.g.cs @@ -511,7 +511,7 @@ partial void ProcessAudioAudioSpeechToTextAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -541,7 +541,7 @@ partial void ProcessAudioAudioSpeechToTextAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve.g.cs index b7aed8b..d71efe3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve2.g.cs index d08a518..6db34e6 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.AudioAudioSpeechToTextAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } 
catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncaudiospeechToTextAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncaudiospeechToTextAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessAudioAudioSpeechToTextAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncaudiospeechToTextAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncaudiospeechToTextAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.g.cs index 078a98b..06c439d 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SpeechToTextAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class SpeechToTextAsyncClient : global::EdenAI.ISpeechToTe /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.TextTextSpellCheckCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.TextTextSpellCheckCreate.g.cs index 04524b5..7a96d01 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.TextTextSpellCheckCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.TextTextSpellCheckCreate.g.cs @@ -198,7 +198,7 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = 
request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -235,13 +235,13 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -273,13 +273,13 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -311,13 +311,13 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 
= global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -349,13 +349,13 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -399,7 +399,7 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextspellCheckResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextspellCheckResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -429,7 +429,7 @@ partial void ProcessTextTextSpellCheckCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextspellCheckResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextspellCheckResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.g.cs index 3acb0aa..96d2a19 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SpellCheckClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class SpellCheckClient : global::EdenAI.ISpellCheckClient, /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.TextTextSummarizeCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.TextTextSummarizeCreate.g.cs index 0ca9c26..c0cbda5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.TextTextSummarizeCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.TextTextSummarizeCreate.g.cs @@ -234,7 +234,7 @@ partial void ProcessTextTextSummarizeCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -271,13 +271,13 @@ partial void ProcessTextTextSummarizeCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = 
global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -309,13 +309,13 @@ partial void ProcessTextTextSummarizeCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -347,13 +347,13 @@ partial void ProcessTextTextSummarizeCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -385,13 +385,13 @@ partial void ProcessTextTextSummarizeCreateResponseContent( if (ReadResponseAsString) { __content_404 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -435,7 +435,7 @@ partial void ProcessTextTextSummarizeCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TextsummarizeResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TextsummarizeResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -465,7 +465,7 @@ partial void ProcessTextTextSummarizeCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TextsummarizeResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.TextsummarizeResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.g.cs index 62402b5..7b744d5 100644 --- a/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.SummarizeClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class SummarizeClient : global::EdenAI.ISummarizeClient, g /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncCreate.g.cs index 03cf04f..303e3b8 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncCreate.g.cs @@ -191,7 +191,7 @@ partial void ProcessVideoVideoTextDetectionAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -221,7 +221,7 @@ partial void ProcessVideoVideoTextDetectionAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve.g.cs index 6622b71..9f546f1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve2.g.cs index e2b251c..f4bd35b 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.VideoVideoTextDetectionAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, 
JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncvideotextDetectionAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AsyncvideotextDetectionAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessVideoVideoTextDetectionAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncvideotextDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncvideotextDetectionAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.g.cs index 34925da..5446175 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextDetectionAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class TextDetectionAsyncClient : global::EdenAI.ITextDetec /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncCreate.g.cs index 5f2792f..5c89432 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncCreate.g.cs @@ -458,7 +458,7 @@ partial void ProcessAudioAudioTextToSpeechAsyncCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.LaunchAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -488,7 +488,7 @@ partial void ProcessAudioAudioTextToSpeechAsyncCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.LaunchAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve.g.cs index 08ec645..2dfee41 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve.g.cs @@ -103,7 +103,7 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.ListAsyncJobResponse.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.ListAsyncJobResponse.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve2.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve2.g.cs index 5855bc6..344697c 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve2.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.AudioAudioTextToSpeechAsyncRetrieve2.g.cs @@ -125,13 +125,13 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -163,13 +163,13 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = 
global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -201,13 +201,13 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -239,13 +239,13 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieve2ResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -289,7 +289,7 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieve2ResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AsyncaudiotextToSpeechAsyncResponseModel.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.AsyncaudiotextToSpeechAsyncResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -319,7 +319,7 @@ partial void ProcessAudioAudioTextToSpeechAsyncRetrieve2ResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AsyncaudiotextToSpeechAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AsyncaudiotextToSpeechAsyncResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.g.cs index cb11e76..221a6c0 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechAsyncClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class TextToSpeechAsyncClient : global::EdenAI.ITextToSpee /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.AudioAudioTextToSpeechCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.AudioAudioTextToSpeechCreate.g.cs index cadf8dd..c4580e3 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.AudioAudioTextToSpeechCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.AudioAudioTextToSpeechCreate.g.cs @@ -331,7 +331,7 @@ partial void 
ProcessAudioAudioTextToSpeechCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -368,13 +368,13 @@ partial void ProcessAudioAudioTextToSpeechCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -406,13 +406,13 @@ partial void ProcessAudioAudioTextToSpeechCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -444,13 +444,13 @@ partial void ProcessAudioAudioTextToSpeechCreateResponseContent( if (ReadResponseAsString) { __content_403 = await 
__response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -482,13 +482,13 @@ partial void ProcessAudioAudioTextToSpeechCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -532,7 +532,7 @@ partial void ProcessAudioAudioTextToSpeechCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.AudiotextToSpeechResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.AudiotextToSpeechResponseModel.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -562,7 +562,7 @@ partial void ProcessAudioAudioTextToSpeechCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.AudiotextToSpeechResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.AudiotextToSpeechResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.g.cs index 1df644d..05fe107 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TextToSpeechClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class TextToSpeechClient : global::EdenAI.ITextToSpeechCli /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.TextTextTopicExtractionCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.TextTextTopicExtractionCreate.g.cs index 2b99f4a..6d6f487 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.TextTextTopicExtractionCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.TextTextTopicExtractionCreate.g.cs @@ -89,7 +89,7 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = 
request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -126,13 +126,13 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( if (ReadResponseAsString) { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } else { __content_400 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerOptions); + __value_400 = global::EdenAI.BadRequest.FromJson(__content_400, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -164,13 +164,13 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( if (ReadResponseAsString) { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } else { __content_500 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerOptions); + __value_500 = global::EdenAI.Error.FromJson(__content_500, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -202,13 +202,13 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( if (ReadResponseAsString) { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, 
JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } else { __content_403 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerOptions); + __value_403 = global::EdenAI.Error.FromJson(__content_403, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -240,13 +240,13 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( if (ReadResponseAsString) { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } else { __content_404 = await __response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerOptions); + __value_404 = global::EdenAI.NotFoundResponse.FromJson(__content_404, JsonSerializerContext); } } catch (global::System.Exception __ex) @@ -290,7 +290,7 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.TexttopicExtractionResponseModel.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.TexttopicExtractionResponseModel.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -320,7 +320,7 @@ partial void ProcessTextTextTopicExtractionCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.TexttopicExtractionResponseModel.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.TexttopicExtractionResponseModel.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.g.cs index 2194c97..0760fab 100644 --- a/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.TopicExtractionClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class TopicExtractionClient : global::EdenAI.ITopicExtract /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenCreate.g.cs b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenCreate.g.cs index 315463e..8a5f737 100644 --- a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenCreate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenCreate.g.cs @@ -67,7 +67,7 @@ partial void ProcessUserUserCustomTokenCreateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -116,7 +116,7 @@ partial void ProcessUserUserCustomTokenCreateResponseContent( __response.EnsureSuccessStatusCode(); return - 
global::EdenAI.CustomTokensCreate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.CustomTokensCreate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -146,7 +146,7 @@ partial void ProcessUserUserCustomTokenCreateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.CustomTokensCreate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.CustomTokensCreate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenList.g.cs b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenList.g.cs index 90f95f2..d620ad4 100644 --- a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenList.g.cs @@ -101,7 +101,7 @@ partial void ProcessUserUserCustomTokenListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? + (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -131,7 +131,7 @@ partial void ProcessUserUserCustomTokenListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenPartialUpdate.g.cs index ed41129..8d27acf 100644 --- a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenPartialUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessUserUserCustomTokenPartialUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessUserUserCustomTokenPartialUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.CustomTokenUpdate.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.CustomTokenUpdate.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessUserUserCustomTokenPartialUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.CustomTokenUpdate.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.CustomTokenUpdate.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenRetrieve.g.cs index 1d2438b..61285dd 100644 --- a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.UserUserCustomTokenRetrieve.g.cs @@ -107,7 +107,7 @@ partial void ProcessUserUserCustomTokenRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.CustomTokensList.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.CustomTokensList.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -137,7 +137,7 @@ partial void ProcessUserUserCustomTokenRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.CustomTokensList.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.CustomTokensList.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? 
throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.g.cs index 0383cdd..230647a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.UserManagementClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class UserManagementClient : global::EdenAI.IUserManagemen /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookPartialUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookPartialUpdate.g.cs index e2cb242..bd09f23 100644 --- a/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookPartialUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookPartialUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessWorkflowWorkflowWebhookPartialUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessWorkflowWorkflowWebhookPartialUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.WebhookParameters.FromJson(__content, JsonSerializerOptions) ?? 
+ global::EdenAI.WebhookParameters.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessWorkflowWorkflowWebhookPartialUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.WebhookParameters.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.WebhookParameters.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookUpdate.g.cs b/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookUpdate.g.cs index bf91cf2..10565c1 100644 --- a/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookUpdate.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.WebhookClient.WorkflowWorkflowWebhookUpdate.g.cs @@ -72,7 +72,7 @@ partial void ProcessWorkflowWorkflowWebhookUpdateResponseContent( __httpRequest.Headers.Add(__authorization.Name, __authorization.Value); } } - var __httpRequestContentBody = request.ToJson(JsonSerializerOptions); + var __httpRequestContentBody = request.ToJson(JsonSerializerContext); var __httpRequestContent = new global::System.Net.Http.StringContent( content: __httpRequestContentBody, encoding: global::System.Text.Encoding.UTF8, @@ -122,7 +122,7 @@ partial void ProcessWorkflowWorkflowWebhookUpdateResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.WebhookParameters.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.WebhookParameters.FromJson(__content, JsonSerializerContext) ?? 
throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -152,7 +152,7 @@ partial void ProcessWorkflowWorkflowWebhookUpdateResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.WebhookParameters.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + await global::EdenAI.WebhookParameters.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.WebhookClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.WebhookClient.g.cs index f6ae3ff..b0be348 100644 --- a/src/libs/EdenAI/Generated/EdenAI.WebhookClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.WebhookClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class WebhookClient : global::EdenAI.IWebhookClient, globa /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowList.g.cs b/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowList.g.cs index d854061..007098a 100644 --- a/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowList.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowList.g.cs @@ -103,7 +103,7 @@ partial void ProcessWorkflowWorkflowListResponseContent( __response.EnsureSuccessStatusCode(); return - global::System.Text.Json.JsonSerializer.Deserialize?>(__content, JsonSerializerOptions) ?? 
+ (global::System.Collections.Generic.IList?)global::System.Text.Json.JsonSerializer.Deserialize(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -133,7 +133,7 @@ partial void ProcessWorkflowWorkflowListResponseContent( ).ConfigureAwait(false); return - await global::System.Text.Json.JsonSerializer.DeserializeAsync?>(__content, JsonSerializerOptions).ConfigureAwait(false) ?? + (global::System.Collections.Generic.IList?)await global::System.Text.Json.JsonSerializer.DeserializeAsync(__content, typeof(global::System.Collections.Generic.IList), JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowRetrieve.g.cs b/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowRetrieve.g.cs index 8d22b71..00cfe13 100644 --- a/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowRetrieve.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.WorkflowWorkflowRetrieve.g.cs @@ -108,7 +108,7 @@ partial void ProcessWorkflowWorkflowRetrieveResponseContent( __response.EnsureSuccessStatusCode(); return - global::EdenAI.Workflow.FromJson(__content, JsonSerializerOptions) ?? + global::EdenAI.Workflow.FromJson(__content, JsonSerializerContext) ?? throw new global::System.InvalidOperationException($"Response deserialization failed for \"{__content}\" "); } catch (global::System.Exception __ex) @@ -138,7 +138,7 @@ partial void ProcessWorkflowWorkflowRetrieveResponseContent( ).ConfigureAwait(false); return - await global::EdenAI.Workflow.FromJsonStreamAsync(__content, JsonSerializerOptions).ConfigureAwait(false) ?? 
+ await global::EdenAI.Workflow.FromJsonStreamAsync(__content, JsonSerializerContext).ConfigureAwait(false) ?? throw new global::System.InvalidOperationException("Response deserialization failed."); } catch (global::System.Exception __ex) diff --git a/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.g.cs b/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.g.cs index 96f4dee..d7b36fd 100644 --- a/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.g.cs +++ b/src/libs/EdenAI/Generated/EdenAI.WorkflowsClient.g.cs @@ -33,7 +33,7 @@ public sealed partial class WorkflowsClient : global::EdenAI.IWorkflowsClient, g /// /// /// - public global::System.Text.Json.JsonSerializerOptions JsonSerializerOptions { get; set; } = new global::System.Text.Json.JsonSerializerOptions(); + public global::System.Text.Json.Serialization.JsonSerializerContext JsonSerializerContext { get; set; } = global::EdenAI.SourceGenerationContext.Default; /// diff --git a/src/libs/EdenAI/openapi.json b/src/libs/EdenAI/openapi.json index c2fa42c..68b78b9 100644 --- a/src/libs/EdenAI/openapi.json +++ b/src/libs/EdenAI/openapi.json @@ -1 +1 @@ -{"openapi": "3.0.3", "info": {"title": "Eden AI", "version": "2.0", "description": "Your project description"}, "paths": {"/{feature}/{subfeature}/batch/{name}/": {"get": {"operationId": "feature_batch_retrieve", "description": "Return paginated response of requests with their status and their\nresponses if the request succeeded or errror if failed", "summary": "Get Batch Job Result", "parameters": [{"in": "path", "name": "feature", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "name", "schema": {"type": "string", "minLength": 1}}, {"in": "query", "name": "page", "schema": {"type": "integer", "minimum": 0}}, {"in": "query", "name": "public_id", "schema": {"type": "integer", "minimum": 0}}, {"in": "query", "name": "status", "schema": {"enum": ["succeeded", "failed", 
"finished", "processing"], "type": "string", "minLength": 1}, "description": "* `succeeded` - Status Succeeded\n* `failed` - Status Failed\n* `finished` - Status Finished\n* `processing` - Status Processing"}, {"in": "path", "name": "subfeature", "schema": {"type": "string"}, "required": true}], "tags": ["Batch"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedBatchResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}, "post": {"operationId": "feature_batch_create", "description": "\nLaunch a async Batch job, given a job name that will be used as its id.\n\nEach request should have the same parameters as you would normally pass to a feature.\n\n\nYou can also pass an optional paramater `name` to help better identify each requests you send.\n\n\nExample with `text`/`sentiment_analysis`:\n\n```json\n\"requests\": [\n {\n \"text\": \"It's -25 outside and I am so hot.\",\n \"language\": \"en\",\n \"providers\": \"google,amazon\"\n },\n {\n \"name\": \"mixed\",\n \"text\": \"Overall I am satisfied with my experience at Amazon, but two areas of major improvement needed.\",\n \"language\": \"en\",\n \"providers\": \"google\"\n },\n ...\n]\n```\n\n\n### Limitations:\nYou can have up to `5` concurrent running Jobs & input up to `500` requests per Batch\n \n\n
Available Features\n\n\n\n|Feature Name|Subfeature Name|\n|------------|---------------|\n|`translation`|`language_detection`|\n|`image`|`face_detection`|\n|`text`|`chat`|\n|`ocr`|`receipt_parser`|\n|`image`|`anonymization`|\n|`audio`|`text_to_speech`|\n|`translation`|`document_translation`|\n|`image`|`logo_detection`|\n|`image`|`landmark_detection`|\n|`image`|`background_removal`|\n|`text`|`topic_extraction`|\n|`text`|`embeddings`|\n|`text`|`custom_classification`|\n|`text`|`anonymization`|\n|`text`|`summarize`|\n|`text`|`custom_named_entity_recognition`|\n|`text`|`generation`|\n|`text`|`code_generation`|\n|`text`|`moderation`|\n|`image`|`generation`|\n|`translation`|`automatic_translation`|\n|`text`|`search`|\n|`text`|`spell_check`|\n|`image`|`object_detection`|\n|`ocr`|`identity_parser`|\n|`image`|`explicit_content`|\n|`ocr`|`invoice_parser`|\n|`ocr`|`resume_parser`|\n|`audio`|`speech_to_text_async`|\n|`ocr`|`ocr_tables_async`|\n|`video`|`text_detection_async`|\n|`text`|`question_answer`|\n|`text`|`keyword_extraction`|\n|`text`|`named_entity_recognition`|\n|`text`|`syntax_analysis`|\n|`text`|`sentiment_analysis`|\n\n
\n\n", "summary": "Launch Batch Job", "parameters": [{"in": "path", "name": "feature", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "subfeature", "schema": {"type": "string"}, "required": true}], "tags": ["Batch"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BatchRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BatchLaunchResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}, "delete": {"operationId": "feature_batch_destroy", "description": "Api view with custom pagination method to return paginated response from any queryset", "summary": "Delete Batch Job", "parameters": [{"in": "path", "name": "feature", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "subfeature", "schema": {"type": "string"}, "required": true}], "tags": ["Batch"], "security": [{"FeatureApiAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/": {"get": {"operationId": "aiproducts_aiproducts_list", "summary": "List Projects", "parameters": [{"in": "query", "name": "project_type", "schema": {"type": "string"}}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": 
{"schema": {"type": "array", "items": {"$ref": "#/components/schemas/AIProject"}}}}, "description": ""}}}}, "/aiproducts/{project_id}/": {"get": {"operationId": "aiproducts_aiproducts_retrieve", "summary": "Retrieve Project", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AIProject"}}}, "description": ""}, "404": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_create", "description": "Allows you to create a new Ask YODA project with specified details.\n\n
Default Embedding Models\n\n|Name|Value|\n|----|-----|\n|**openai**|`text-embedding-3-large`|\n|**cohere**|`embed-multilingual-v3.0`|\n|**google**|`text-multilingual-embedding-002`|\n|**mistral**|`mistral-embed`|\n|**jina**|`jina-embeddings-v3`|\n\n
", "summary": "Create Project", "tags": ["Custom Chatbot (Create operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskYourDataProjectRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaCreateProjectResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/{prompt_name}/": {"patch": {"operationId": "aiproducts_aiproducts_askyoda_v2_partial_update", "description": "Update the bot prompt for your RAG project using an existing prompt.\nThis endpoint allows you to change the current bot prompt to a different existing prompt.", "summary": "Update Bot Prompt", "parameters": [{"in": "query", "name": "file_urls", "schema": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, {"in": "query", "name": "model", "schema": {"type": "string", "minLength": 1}, "description": "The model used to call the prompt. E.g. openai/gpt-4o", "required": true}, {"in": "query", "name": "name", "schema": {"type": "string", "minLength": 1}, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). 
No spaces allowed.", "required": true}, {"in": "query", "name": "params", "schema": {}, "description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "prompt_name", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "system_prompt", "schema": {"type": "string", "minLength": 1}, "description": "Specify a system prompt for the LLM"}, {"in": "query", "name": "text", "schema": {"type": "string", "minLength": 1}, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```", "required": true}], "tags": ["Custom Chatbot (Update operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_file/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_file_create", "description": "This endpoint enables you to upload files (**CSV**, **AUDIO**, **PDF**, or **XML**) into your project.
Upon upload, the file will be processed and stored as text embeddings within the project's database.
\n\n### Supported File Types\n- **CSV**: Comma-separated values files.\n- **Audio**: Supported audio formats for transcription to text.\n- **PDF**: Portable Document Format files.\n- **XML**: Extensible Markup Language files.\n\n### OCR Processing and Speech-to-Text\nIf the uploaded file is a PDF, it will undergo Optical Character Recognition (OCR) processing using the default provider chosen during the project creation step.
\nSimilarly, audio files will be converted to text using Speech-to-Text, also utilizing the default provider configured during project setup.\n\n### Accepted File Extensions\n- **PDF**\n- **CSV**\n- **AMR**\n- **FLAC**\n- **WAV**\n- **OGG**\n- **MP3**\n- **MP4**\n- **WEBM**\n- **XML**\n\n\n", "summary": "Add File", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddFileRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/AddFileRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/AddFileRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_image/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_image_create", "description": "\nThis endpoint enables you to upload images into your project.
Upon upload, the file will be processed and stored as text embeddings within the project's database.\n", "summary": "Add Image", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/AddImageRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_text/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_text_create", "description": "Add text data in your project, which will be stored as embeddings\nwithin your chosen database provider.", "summary": "Add Texts", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddTextRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_url/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_url_create", "description": "Add a list of URLs into your projects,\nthey will be processed and stored as text embeddings within your project.", "summary": "Add Urls", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddUrlRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_video/": 
{"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_video_create", "description": "\nThis endpoint enables you to upload videos into your project.
Upon upload, the file will be processed and stored as text embeddings within the project's database.\n", "summary": "Add Video", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddVideoRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/AddVideoRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/ask_llm/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_ask_llm_create", "description": "Retrieve a list of search query responses and compare them to your\ninput. Provide a query, and in return, receive scores for the most relevant items from your project,\nranked by their proximity to your query.", "summary": "Ask LLM", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Query operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskLLMRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaQueryResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/conversations/": {"get": {"operationId": 
"aiproducts_aiproducts_askyoda_v2_conversations_list", "summary": "List Conversations", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/Conversation"}}}}, "description": ""}}}, "post": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_create", "summary": "Create Conversation", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Create operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/ConversationRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ConversationRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Conversation"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/conversations/{conversation_id}/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_retrieve", "summary": "Retrieve Conversation Details", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetail"}}}, "description": ""}}}, "put": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_update", "summary": "Update Conversation 
Name", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Update operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetailRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/ConversationDetailRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ConversationDetailRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetail"}}}, "description": ""}}}, "patch": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_partial_update", "summary": "Update Conversation Name", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Update operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedConversationDetailRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedConversationDetailRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedConversationDetailRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetail"}}}, "description": ""}}}, "delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_destroy", "summary": "Delete Conversation", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": 
"project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/create_bot_prompt/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_create_bot_prompt_create", "description": "Create a default bot prompt for your RAG project.\nThe bot prompt serves as the default system message or personality definition for your RAG-powered chatbot. This prompt helps define how your bot should interact with users and what context it should consider when processing queries.", "summary": "Create Bot Profile", "parameters": [{"in": "query", "name": "file_urls", "schema": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, {"in": "query", "name": "model", "schema": {"type": "string", "minLength": 1}, "description": "The model used to call the prompt. E.g. openai/gpt-4o", "required": true}, {"in": "query", "name": "name", "schema": {"type": "string", "minLength": 1}, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed.", "required": true}, {"in": "query", "name": "params", "schema": {}, "description": "Params that are passed on to the llm request. 
See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "query", "name": "system_prompt", "schema": {"type": "string", "minLength": 1}, "description": "Specify a system prompt for the LLM"}, {"in": "query", "name": "text", "schema": {"type": "string", "minLength": 1}, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```", "required": true}], "tags": ["Custom Chatbot (Create operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/delete_all_chunks/": {"delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_delete_all_chunks_destroy", "description": "Delete all chunks from your project.\nThis action is irreversible and will remove all data stored in your project.", "summary": "Delete All Chunks", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaDeleteResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/delete_chunk/": {"delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_delete_chunk_destroy", "description": "Delete one or multiple chunks from your project. \nYou can delete a single chunk by providing it ID either as a query parameter or in the request body. \nFor bulk deletion, provide a list of chunk IDs in the request body.\n\nExamples:\n- Single chunk deletion (query parameter): DELETE /chunks?id=chunk123\n- Single chunk deletion (body): {\"id\": \"chunk123\"}\n- Bulk deletion: {\"ids\": [\"chunk123\", \"chunk456\", \"chunk789\"]}", "summary": "Delete Chunk", "parameters": [{"in": "query", "name": "chunk_ids", "schema": {"type": "array", "items": {"type": "string", "minLength": 1, "description": "chunk_id"}}, "description": "List of chunk_ids to delete"}, {"in": "query", "name": "id", "schema": {"type": "string", "minLength": 1}, "description": "chunk_id"}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaDeleteResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/files/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_files_list", "summary": "List Files", "parameters": [{"in": "path", "name": "project_id", "schema": 
{"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/AiProductFile"}}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/files/{file_id}/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_files_retrieve", "summary": "Get File", "parameters": [{"in": "path", "name": "file_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AiProductFile"}}}, "description": ""}}}, "delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_files_destroy", "summary": "Delete File", "parameters": [{"in": "path", "name": "file_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/generate/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_generate_create", "description": "Interact with different LLM providers, this endpoint is based on the structure of the\nLLM API.", "summary": "Generate", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Query operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/GenerateRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], 
"responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaQueryResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/info/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_info_retrieve", "description": "Retrieve details about your project within your Ask YODA project,\nincluding the total number of items stored in your project collection and default models", "summary": "Get info", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaInfoResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/list_chunks_ids/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_list_chunks_ids_create", "description": "Retrieve a list of all chunk IDs stored in your project.", "summary": "List Check IDs", 
"parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListChunkRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaListChunksIdsResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/query/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_query_create", "description": "Interact with your data by selecting your preferred Language Model \nprovider. 
The chosen provider will then respond to queries based on the data you have added\nto your collection", "summary": "Query", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Query operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskLLMRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaAskLlmResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/remove_bot_prompt/": {"delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_remove_bot_prompt_destroy", "description": "Remove the bot prompt from your RAG project.\n This endpoint removes the custom bot prompt from your project, effectively resetting the bot's personality to system defaults. 
This action cannot be undone, but you can always set a new bot prompt later.", "summary": "Remove Bot Prompt", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/update_project/": {"patch": {"operationId": "aiproducts_aiproducts_askyoda_v2_update_project_partial_update", "description": "Update the default settings of the Yoda project.\nIt allows you to modify the project's default settings,\nspecifically changing the llm_provider (language model provider),\nllm_model (language model), ocr_provider (upload pdf), speech_to_text provider (upload audio)\nand the default chunks parameter associated with the default project.", "summary": "Update Project", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Update operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedAskYodaProjectUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskYodaProjectUpdate"}}}, "description": ""}}}}, "/aiproducts/delete/{project_id}/": {"delete": {"operationId": "aiproducts_aiproducts_delete_destroy", "description": "View to delete an AI project.", "summary": "Delete Project", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/audio/speech_to_text_async/": {"get": {"operationId": "audio_audio_speech_to_text_async_retrieve", "description": "Get a list of all jobs 
launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Speech to Text List Jobs", "tags": ["Speech To Text Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "audio_audio_speech_to_text_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|0.024 (per 60 seconds)|15 seconds\n|**google**|-|`v1p1beta1`|0.024 (per 60 seconds)|1 second\n|**microsoft**|-|`v1.0`|0.0168 (per 60 seconds)|1 second\n|**assembly**|-|`v2`|0.011 (per 60 seconds)|1 second\n|**deepgram**|**nova-3**|`v1`|0.0052 (per 60 seconds)|1 second\n|**deepgram**|**enhanced**|`v1`|0.0189 (per 60 seconds)|1 second\n|**deepgram**|-|`v1`|0.0189 (per 60 seconds)|1 second\n|**deepgram**|**base**|`v1`|0.0169 (per 60 seconds)|1 second\n|**openai**|-|`boto3 (v1.15.18)`|0.006 (per 60 seconds)|1 second\n|**gladia**|-|`v1`|0.0102 (per 60 seconds)|1 second\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Bashkir**|`ba`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hebrew**|`he`|\n|**Hebrew**|`iw`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Interlingua (International Auxiliary Language Association)**|`ia`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lingala**|`ln`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Mandarin Chinese**|`cmn`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Occitan (post 1500)**|`oc`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`mo`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Serbian**|`sr`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wu Chinese**|`wuu`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n|**at**|`at`|\n|**jp**|`jp`|\n|**mymr**|`mymr`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Afrikaans (South Africa)**|`af-ZA`|\n|**Albanian (Albania)**|`sq-AL`|\n|**Amharic (Ethiopia)**|`am-ET`|\n|**Arabic (Algeria)**|`ar-DZ`|\n|**Arabic (Bahrain)**|`ar-BH`|\n|**Arabic (Egypt)**|`ar-EG`|\n|**Arabic (Iraq)**|`ar-IQ`|\n|**Arabic (Israel)**|`ar-IL`|\n|**Arabic (Jordan)**|`ar-JO`|\n|**Arabic (Kuwait)**|`ar-KW`|\n|**Arabic (Lebanon)**|`ar-LB`|\n|**Arabic (Libya)**|`ar-LY`|\n|**Arabic (Mauritania)**|`ar-MR`|\n|**Arabic (Morocco)**|`ar-MA`|\n|**Arabic (Oman)**|`ar-OM`|\n|**Arabic (Palestinian Territories)**|`ar-PS`|\n|**Arabic (Qatar)**|`ar-QA`|\n|**Arabic (Saudi Arabia)**|`ar-SA`|\n|**Arabic (Syria)**|`ar-SY`|\n|**Arabic (Tunisia)**|`ar-TN`|\n|**Arabic (United Arab Emirates)**|`ar-AE`|\n|**Arabic (Yemen)**|`ar-YE`|\n|**Armenian (Armenia)**|`hy-AM`|\n|**Azerbaijani (Azerbaijan)**|`az-AZ`|\n|**Bangla (Bangladesh)**|`bn-BD`|\n|**Bangla (India)**|`bn-IN`|\n|**Basque (Spain)**|`eu-ES`|\n|**Bosnian (Bosnia & Herzegovina)**|`bs-BA`|\n|**Bulgarian (Bulgaria)**|`bg-BG`|\n|**Burmese (Myanmar (Burma))**|`my-MM`|\n|**Cantonese (China)**|`yue-CN`|\n|**Cantonese (Traditional, Hong Kong SAR China)**|`yue-Hant-HK`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Hong Kong SAR China)**|`zh-HK`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Croatian (Croatia)**|`hr-HR`|\n|**Czech (Czechia)**|`cs-CZ`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Belgium)**|`nl-BE`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (Australia)**|`en-AU`|\n|**English (Canada)**|`en-CA`|\n|**English (Ghana)**|`en-GH`|\n|**English (Hong Kong SAR China)**|`en-HK`|\n|**English (India)**|`en-IN`|\n|**English (Ireland)**|`en-IE`|\n|**English (Kenya)**|`en-KE`|\n|**English (New Zealand)**|`en-NZ`|\n|**English (Nigeria)**|`en-NG`|\n|**English (Pakistan)**|`en-PK`|\n|**English (Philippines)**|`en-PH`|\n|**English (Singapore)**|`en-SG`|\n|**English (South Africa)**|`en-ZA`|\n|**English 
(Tanzania)**|`en-TZ`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United Kingdom)**|`en-UK`|\n|**English (United States)**|`en-US`|\n|**Estonian (Estonia)**|`et-EE`|\n|**Filipino (Philippines)**|`fil-PH`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (Belgium)**|`fr-BE`|\n|**French (Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**Galician (Spain)**|`gl-ES`|\n|**Georgian (Georgia)**|`ka-GE`|\n|**German (Austria)**|`de-AT`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Greek (Greece)**|`el-GR`|\n|**Gujarati (India)**|`gu-IN`|\n|**Hebrew (Israel)**|`he-IL`|\n|**Hebrew (Israel)**|`iw-IL`|\n|**Hindi (India)**|`hi-IN`|\n|**Hindi (Latin)**|`hi-Latn`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Icelandic (Iceland)**|`is-IS`|\n|**Indonesian (Indonesia)**|`id-ID`|\n|**Irish (Ireland)**|`ga-IE`|\n|**Italian (Italy)**|`it-IT`|\n|**Italian (Switzerland)**|`it-CH`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Javanese (Indonesia)**|`jv-ID`|\n|**Kannada (India)**|`kn-IN`|\n|**Kazakh (Kazakhstan)**|`kk-KZ`|\n|**Khmer (Cambodia)**|`km-KH`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Lao (Laos)**|`lo-LA`|\n|**Latvian (Latvia)**|`lv-LV`|\n|**Lithuanian (Lithuania)**|`lt-LT`|\n|**Macedonian (North Macedonia)**|`mk-MK`|\n|**Malay (Malaysia)**|`ms-MY`|\n|**Malayalam (India)**|`ml-IN`|\n|**Maltese (Malta)**|`mt-MT`|\n|**Marathi (India)**|`mr-IN`|\n|**Mongolian (Mongolia)**|`mn-MN`|\n|**Nepali (Nepal)**|`ne-NP`|\n|**Norwegian (Norway)**|`no-NO`|\n|**Norwegian Bokm\u00e5l (Norway)**|`nb-NO`|\n|**Pashto (Afghanistan)**|`ps-AF`|\n|**Persian (Iran)**|`fa-IR`|\n|**Polish (Poland)**|`pl-PL`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Punjabi (India)**|`pa-Guru-IN`|\n|**Romanian (Romania)**|`ro-RO`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Serbia)**|`sr-RS`|\n|**Sinhala (Sri Lanka)**|`si-LK`|\n|**Slovak (Slovakia)**|`sk-SK`|\n|**Slovenian (Slovenia)**|`sl-SI`|\n|**Somali (Somalia)**|`so-SO`|\n|**Spanish 
(Argentina)**|`es-AR`|\n|**Spanish (Bolivia)**|`es-BO`|\n|**Spanish (Chile)**|`es-CL`|\n|**Spanish (Colombia)**|`es-CO`|\n|**Spanish (Costa Rica)**|`es-CR`|\n|**Spanish (Cuba)**|`es-CU`|\n|**Spanish (Dominican Republic)**|`es-DO`|\n|**Spanish (Ecuador)**|`es-EC`|\n|**Spanish (El Salvador)**|`es-SV`|\n|**Spanish (Equatorial Guinea)**|`es-GQ`|\n|**Spanish (Guatemala)**|`es-GT`|\n|**Spanish (Honduras)**|`es-HN`|\n|**Spanish (Latin America)**|`es-419`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Nicaragua)**|`es-NI`|\n|**Spanish (Panama)**|`es-PA`|\n|**Spanish (Paraguay)**|`es-PY`|\n|**Spanish (Peru)**|`es-PE`|\n|**Spanish (Puerto Rico)**|`es-PR`|\n|**Spanish (Spain)**|`es-ES`|\n|**Spanish (United States)**|`es-US`|\n|**Spanish (Uruguay)**|`es-UY`|\n|**Spanish (Venezuela)**|`es-VE`|\n|**Sundanese (Indonesia)**|`su-ID`|\n|**Swahili (Kenya)**|`sw-KE`|\n|**Swahili (Tanzania)**|`sw-TZ`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Tamil (India)**|`ta-IN`|\n|**Tamil (Malaysia)**|`ta-MY`|\n|**Tamil (Singapore)**|`ta-SG`|\n|**Tamil (Sri Lanka)**|`ta-LK`|\n|**Telugu (India)**|`te-IN`|\n|**Thai (Thailand)**|`th-TH`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Ukrainian (Ukraine)**|`uk-UA`|\n|**Urdu (India)**|`ur-IN`|\n|**Urdu (Pakistan)**|`ur-PK`|\n|**Uzbek (Uzbekistan)**|`uz-UZ`|\n|**Vietnamese (Vietnam)**|`vi-VN`|\n|**Welsh (United Kingdom)**|`cy-GB`|\n|**Wu Chinese (China)**|`wuu-CN`|\n|**Zulu (South Africa)**|`zu-ZA`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**deepgram**|`enhanced`|\n\n
", "summary": "Speech to Text Launch Job", "tags": ["Speech To Text Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/SpeechToTextAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/SpeechToTextAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "audio_audio_speech_to_text_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Speech to text delete Jobs", "tags": ["Speech To Text Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/audio/speech_to_text_async/{public_id}/": {"get": {"operationId": "audio_audio_speech_to_text_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Speech to Text Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Speech To Text Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncaudiospeech_to_text_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": 
{"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/audio/text_to_speech/": {"post": {"operationId": "audio_audio_text_to_speech_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|4.0 (per 1000000 char)|1 char\n|**amazon**|**Neural**|`boto3 (v1.15.18)`|16.0 (per 1000000 char)|1 char\n|**google**|-|`v1`|4.0 (per 1000000 char)|1 char\n|**google**|**Standard**|`v1`|4.0 (per 1000000 char)|1 char\n|**google**|**Neural**|`v1`|16.0 (per 1000000 char)|1 char\n|**google**|**Wavenet**|`v1`|16.0 (per 1000000 char)|1 char\n|**google**|**Studio**|`v1`|0.16 (per 1000 char)|1 char\n|**google**|**gemini-2.5-flash-tts**|`v1`|10.0 (per 1000000 char)|1 char\n|**google**|**gemini-2.5-pro-tts**|`v1`|20.0 (per 1000000 char)|None char\n|**microsoft**|-|`v1.0`|16.0 (per 1000000 char)|1 char\n|**lovoai**|-|`v1`|160.0 (per 1000000 char)|1000 char\n|**elevenlabs**|-|`v1`|0.3 (per 1000 char)|1 char\n|**openai**|-|`v1.0`|0.015 (per 1000 char)|1 char\n|**deepgram**|-|`v1`|0.015 (per 1000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Macedonian**|`mk`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Mandarin Chinese**|`cmn`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Serbian**|`sr`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Standard Arabic**|`arb`|\n|**Sundanese**|`su`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wu Chinese**|`wuu`|\n|**Xhosa**|`xh`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans (South Africa)**|`af-ZA`|\n|**Albanian (Albania)**|`sq-AL`|\n|**Amharic (Ethiopia)**|`am-ET`|\n|**Arabic (Algeria)**|`ar-DZ`|\n|**Arabic (Bahrain)**|`ar-BH`|\n|**Arabic (Egypt)**|`ar-EG`|\n|**Arabic (Iraq)**|`ar-IQ`|\n|**Arabic (Jordan)**|`ar-JO`|\n|**Arabic (Kuwait)**|`ar-KW`|\n|**Arabic (Lebanon)**|`ar-LB`|\n|**Arabic (Libya)**|`ar-LY`|\n|**Arabic (Morocco)**|`ar-MA`|\n|**Arabic (Oman)**|`ar-OM`|\n|**Arabic (Pseudo-Accents)**|`ar-XA`|\n|**Arabic (Qatar)**|`ar-QA`|\n|**Arabic (Saudi Arabia)**|`ar-SA`|\n|**Arabic (Syria)**|`ar-SY`|\n|**Arabic (Tunisia)**|`ar-TN`|\n|**Arabic (United Arab Emirates)**|`ar-AE`|\n|**Arabic (Yemen)**|`ar-YE`|\n|**Armenian (Armenia)**|`hy-AM`|\n|**Azerbaijani (Azerbaijan)**|`az-AZ`|\n|**Bangla (Bangladesh)**|`bn-BD`|\n|**Bangla (India)**|`bn-IN`|\n|**Basque (Spain)**|`eu-ES`|\n|**Bosnian (Bosnia & Herzegovina)**|`bs-BA`|\n|**Bulgarian (Bulgaria)**|`bg-BG`|\n|**Burmese (Myanmar (Burma))**|`my-MM`|\n|**Cantonese (China)**|`yue-CN`|\n|**Cantonese (Hong Kong SAR China)**|`yue-HK`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-CN-henan`|\n|**Chinese (China)**|`zh-CN-shandong`|\n|**Chinese (China)**|`zh-CN-sichuan`|\n|**Chinese (Hong Kong SAR China)**|`zh-HK`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Croatian (Croatia)**|`hr-HR`|\n|**Czech (Czechia)**|`cs-CZ`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Belgium)**|`nl-BE`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (Australia)**|`en-AU`|\n|**English (Canada)**|`en-CA`|\n|**English (Cura\u00e7ao)**|`en-AN`|\n|**English (Hong Kong SAR China)**|`en-HK`|\n|**English (India)**|`en-IN`|\n|**English (Ireland)**|`en-IE`|\n|**English (Kenya)**|`en-KE`|\n|**English (New Zealand)**|`en-NZ`|\n|**English (Nigeria)**|`en-NG`|\n|**English (Philippines)**|`en-PH`|\n|**English (Singapore)**|`en-SG`|\n|**English (South Africa)**|`en-ZA`|\n|**English (Tanzania)**|`en-TZ`|\n|**English (United 
Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**Estonian (Estonia)**|`et-EE`|\n|**Filipino (Philippines)**|`fil-PH`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (Belgium)**|`fr-BE`|\n|**French (Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**Galician (Spain)**|`gl-ES`|\n|**Georgian (Georgia)**|`ka-GE`|\n|**German (Austria)**|`de-AT`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Greek (Greece)**|`el-GR`|\n|**Gujarati (India)**|`gu-IN`|\n|**Hebrew (Israel)**|`he-IL`|\n|**Hindi (India)**|`hi-IN`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Icelandic (Iceland)**|`is-IS`|\n|**Indonesian (Indonesia)**|`id-ID`|\n|**Irish (Ireland)**|`ga-IE`|\n|**Italian (Italy)**|`it-IT`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Javanese (Indonesia)**|`jv-ID`|\n|**Kannada (India)**|`kn-IN`|\n|**Kazakh (Kazakhstan)**|`kk-KZ`|\n|**Khmer (Cambodia)**|`km-KH`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Lao (Laos)**|`lo-LA`|\n|**Latvian (Latvia)**|`lv-LV`|\n|**Lithuanian (Lithuania)**|`lt-LT`|\n|**Macedonian (North Macedonia)**|`mk-MK`|\n|**Malay (Malaysia)**|`ms-MY`|\n|**Malayalam (India)**|`ml-IN`|\n|**Maltese (Malta)**|`mt-MT`|\n|**Mandarin Chinese (China)**|`cmn-CN`|\n|**Mandarin Chinese (Taiwan)**|`cmn-TW`|\n|**Marathi (India)**|`mr-IN`|\n|**Mongolian (Mongolia)**|`mn-MN`|\n|**Nepali (Nepal)**|`ne-NP`|\n|**Norwegian Bokm\u00e5l (Norway)**|`nb-NO`|\n|**Pashto (Afghanistan)**|`ps-AF`|\n|**Persian (Iran)**|`fa-IR`|\n|**Polish (Poland)**|`pl-PL`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Punjabi (India)**|`pa-IN`|\n|**Romanian (Romania)**|`ro-RO`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Serbia)**|`sr-RS`|\n|**Sinhala (Sri Lanka)**|`si-LK`|\n|**Slovak (Slovakia)**|`sk-SK`|\n|**Slovenian (Slovenia)**|`sl-SI`|\n|**Somali (Somalia)**|`so-SO`|\n|**Spanish (Argentina)**|`es-AR`|\n|**Spanish (Bolivia)**|`es-BO`|\n|**Spanish (Chile)**|`es-CL`|\n|**Spanish (Colombia)**|`es-CO`|\n|**Spanish 
(Costa Rica)**|`es-CR`|\n|**Spanish (Cuba)**|`es-CU`|\n|**Spanish (Dominican Republic)**|`es-DO`|\n|**Spanish (Ecuador)**|`es-EC`|\n|**Spanish (El Salvador)**|`es-SV`|\n|**Spanish (Equatorial Guinea)**|`es-GQ`|\n|**Spanish (Guatemala)**|`es-GT`|\n|**Spanish (Honduras)**|`es-HN`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Nicaragua)**|`es-NI`|\n|**Spanish (Panama)**|`es-PA`|\n|**Spanish (Paraguay)**|`es-PY`|\n|**Spanish (Peru)**|`es-PE`|\n|**Spanish (Puerto Rico)**|`es-PR`|\n|**Spanish (Spain)**|`es-ES`|\n|**Spanish (United States)**|`es-US`|\n|**Spanish (Uruguay)**|`es-UY`|\n|**Spanish (Venezuela)**|`es-VE`|\n|**Sundanese (Indonesia)**|`su-ID`|\n|**Swahili (Kenya)**|`sw-KE`|\n|**Swahili (Tanzania)**|`sw-TZ`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Tamil (India)**|`ta-IN`|\n|**Tamil (Malaysia)**|`ta-MY`|\n|**Tamil (Singapore)**|`ta-SG`|\n|**Tamil (Sri Lanka)**|`ta-LK`|\n|**Telugu (India)**|`te-IN`|\n|**Thai (Thailand)**|`th-TH`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Ukrainian (Ukraine)**|`uk-UA`|\n|**Urdu (India)**|`ur-IN`|\n|**Urdu (Pakistan)**|`ur-PK`|\n|**Uzbek (United Kingdom)**|`uz-UK`|\n|**Uzbek (Uzbekistan)**|`uz-UZ`|\n|**Vietnamese (Vietnam)**|`vi-VN`|\n|**Welsh (United Kingdom)**|`cy-GB`|\n|**Wu Chinese (China)**|`wuu-CN`|\n|**Xhosa (South Africa)**|`xh-ZA`|\n|**Zulu (South Africa)**|`zu-ZA`|\n\n

Supported Models

\n\n", "summary": "Text to Speech", "tags": ["Text To Speech"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/audiotext_to_speechResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/audio/text_to_speech_async/": {"get": {"operationId": "audio_audio_text_to_speech_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Text To Speech list jobs", "tags": ["Text To Speech Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "audio_audio_text_to_speech_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**lovoai**|-|`v1`|0.16 (per 1000 char)|1 char\n|**amazon**|-|`boto3 (v1.15.18)`|4.0 (per 1000000 char)|1 char\n|**amazon**|**Neural**|`boto3 (v1.15.18)`|16.0 (per 1000000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Macedonian**|`mk`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Mandarin Chinese**|`cmn`|\n|**Marathi**|`mr`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Serbian**|`sr`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Standard Arabic**|`arb`|\n|**Sundanese**|`su`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wu Chinese**|`wuu`|\n|**Xhosa**|`xh`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans (South Africa)**|`af-ZA`|\n|**Albanian (Albania)**|`sq-AL`|\n|**Amharic (Ethiopia)**|`am-ET`|\n|**Arabic (Algeria)**|`ar-DZ`|\n|**Arabic (Bahrain)**|`ar-BH`|\n|**Arabic (Egypt)**|`ar-EG`|\n|**Arabic (Iraq)**|`ar-IQ`|\n|**Arabic (Jordan)**|`ar-JO`|\n|**Arabic (Kuwait)**|`ar-KW`|\n|**Arabic (Lebanon)**|`ar-LB`|\n|**Arabic (Libya)**|`ar-LY`|\n|**Arabic (Morocco)**|`ar-MA`|\n|**Arabic (Oman)**|`ar-OM`|\n|**Arabic (Qatar)**|`ar-QA`|\n|**Arabic (Saudi Arabia)**|`ar-SA`|\n|**Arabic (Tunisia)**|`ar-TN`|\n|**Arabic (United Arab Emirates)**|`ar-AE`|\n|**Armenian (Armenia)**|`hy-AM`|\n|**Azerbaijani (Azerbaijan)**|`az-AZ`|\n|**Bangla (India)**|`bn-IN`|\n|**Basque (Spain)**|`eu-ES`|\n|**Bosnian (Bosnia & Herzegovina)**|`bs-BA`|\n|**Burmese (Myanmar (Burma))**|`my-MM`|\n|**Cantonese (China)**|`yue-CN`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-CN-henan`|\n|**Chinese (China)**|`zh-CN-shandong`|\n|**Chinese (China)**|`zh-CN-sichuan`|\n|**Chinese (Hong Kong SAR China)**|`zh-HK`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Croatian (Croatia)**|`hr-HR`|\n|**Czech (Czechia)**|`cs-CZ`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Belgium)**|`nl-BE`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (Australia)**|`en-AU`|\n|**English (Canada)**|`en-CA`|\n|**English (Cura\u00e7ao)**|`en-AN`|\n|**English (Hong Kong SAR China)**|`en-HK`|\n|**English (India)**|`en-IN`|\n|**English (Ireland)**|`en-IE`|\n|**English (Kenya)**|`en-KE`|\n|**English (New Zealand)**|`en-NZ`|\n|**English (Nigeria)**|`en-NG`|\n|**English (Philippines)**|`en-PH`|\n|**English (Singapore)**|`en-SG`|\n|**English (South Africa)**|`en-ZA`|\n|**English (Tanzania)**|`en-TZ`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**Estonian (Estonia)**|`et-EE`|\n|**Filipino (Philippines)**|`fil-PH`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (Belgium)**|`fr-BE`|\n|**French 
(Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**Galician (Spain)**|`gl-ES`|\n|**Georgian (Georgia)**|`ka-GE`|\n|**German (Austria)**|`de-AT`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Hebrew (Israel)**|`he-IL`|\n|**Hindi (India)**|`hi-IN`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Icelandic (Iceland)**|`is-IS`|\n|**Indonesian (Indonesia)**|`id-ID`|\n|**Irish (Ireland)**|`ga-IE`|\n|**Italian (Italy)**|`it-IT`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Javanese (Indonesia)**|`jv-ID`|\n|**Kazakh (Kazakhstan)**|`kk-KZ`|\n|**Khmer (Cambodia)**|`km-KH`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Lao (Laos)**|`lo-LA`|\n|**Latvian (Latvia)**|`lv-LV`|\n|**Lithuanian (Lithuania)**|`lt-LT`|\n|**Macedonian (North Macedonia)**|`mk-MK`|\n|**Malay (Malaysia)**|`ms-MY`|\n|**Malayalam (India)**|`ml-IN`|\n|**Maltese (Malta)**|`mt-MT`|\n|**Mandarin Chinese (China)**|`cmn-CN`|\n|**Marathi (India)**|`mr-IN`|\n|**Mongolian (Mongolia)**|`mn-MN`|\n|**Nepali (Nepal)**|`ne-NP`|\n|**Norwegian Bokm\u00e5l (Norway)**|`nb-NO`|\n|**Pashto (Afghanistan)**|`ps-AF`|\n|**Persian (Iran)**|`fa-IR`|\n|**Polish (Poland)**|`pl-PL`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Romanian (Romania)**|`ro-RO`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Serbia)**|`sr-RS`|\n|**Sinhala (Sri Lanka)**|`si-LK`|\n|**Slovak (Slovakia)**|`sk-SK`|\n|**Slovenian (Slovenia)**|`sl-SI`|\n|**Somali (Somalia)**|`so-SO`|\n|**Spanish (Argentina)**|`es-AR`|\n|**Spanish (Bolivia)**|`es-BO`|\n|**Spanish (Chile)**|`es-CL`|\n|**Spanish (Colombia)**|`es-CO`|\n|**Spanish (Costa Rica)**|`es-CR`|\n|**Spanish (Cuba)**|`es-CU`|\n|**Spanish (Ecuador)**|`es-EC`|\n|**Spanish (El Salvador)**|`es-SV`|\n|**Spanish (Equatorial Guinea)**|`es-GQ`|\n|**Spanish (Guatemala)**|`es-GT`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Nicaragua)**|`es-NI`|\n|**Spanish (Panama)**|`es-PA`|\n|**Spanish (Paraguay)**|`es-PY`|\n|**Spanish (Puerto 
Rico)**|`es-PR`|\n|**Spanish (Spain)**|`es-ES`|\n|**Spanish (United States)**|`es-US`|\n|**Spanish (Uruguay)**|`es-UY`|\n|**Spanish (Venezuela)**|`es-VE`|\n|**Sundanese (Indonesia)**|`su-ID`|\n|**Swahili (Kenya)**|`sw-KE`|\n|**Swahili (Tanzania)**|`sw-TZ`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Tamil (India)**|`ta-IN`|\n|**Tamil (Malaysia)**|`ta-MY`|\n|**Tamil (Singapore)**|`ta-SG`|\n|**Telugu (India)**|`te-IN`|\n|**Thai (Thailand)**|`th-TH`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Ukrainian (Ukraine)**|`uk-UA`|\n|**Urdu (India)**|`ur-IN`|\n|**Urdu (Pakistan)**|`ur-PK`|\n|**Uzbek (United Kingdom)**|`uz-UK`|\n|**Vietnamese (Vietnam)**|`vi-VN`|\n|**Welsh (United Kingdom)**|`cy-GB`|\n|**Wu Chinese (China)**|`wuu-CN`|\n|**Xhosa (South Africa)**|`xh-ZA`|\n|**Zulu (South Africa)**|`zu-ZA`|\n\n

Supported Models

\n\n", "summary": "Text to Speech launch job", "tags": ["Text To Speech Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/TextToSpeechAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/TextToSpeechAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "audio_audio_text_to_speech_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Text To Speech delete Jobs", "tags": ["Text To Speech Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/audio/text_to_speech_async/{public_id}/": {"get": {"operationId": "audio_audio_text_to_speech_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Text To Speech Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Text To Speech Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncaudiotext_to_speech_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, 
"403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/batch/": {"get": {"operationId": "batch_batch_list", "summary": "List Batch Jobs", "tags": ["Batch"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/BatchList"}}}}, "description": ""}}}}, "/cost_management/": {"get": {"operationId": "cost_management_cost_management_retrieve", "summary": "Monitor Consumptions", "parameters": [{"in": "query", "name": "begin", "schema": {"type": "string", "format": "date"}, "required": true}, {"in": "query", "name": "end", "schema": {"type": "string", "format": "date"}, "required": true}, {"in": "query", "name": "provider", "schema": {"type": "string", "minLength": 1, "maxLength": 200}}, {"in": "query", "name": "rag_project_id", "schema": {"type": "string", "format": "uuid"}}, {"in": "query", "name": "step", "schema": {"type": "integer", "maximum": 4, "minimum": 1}, "required": true}, {"in": "query", "name": "subfeature", "schema": {"type": "string", "minLength": 1, "maxLength": 200}}, {"in": "query", "name": "token", "schema": {"type": "string", "minLength": 1}}, {"in": "query", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}}], "tags": ["Cost Monitoring"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CostMonitoringResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/cost_management/credits/": {"get": {"operationId": "cost_management_", "description": "Get you current credits", "summary": "My Credits", "tags": ["Cost Monitoring"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/credits_serializer"}}}, "description": ""}}}}, "/enabled-features/": {"get": {"operationId": "enabled-features_enabled_features_retrieve", "description": "List all enabled features.", "tags": ["enabled-features"], "security": [{"jwtAuth": []}, {}], "responses": {"200": {"description": "No response body"}}}}, "/image/ai_detection/": {"post": {"operationId": "image_image_ai_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**winstonai**|`v1`|0.021 (per 1 request)|1 request\n\n\n
\n\n", "summary": "AI Detection", "tags": ["Ai Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageai_detectionAiDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageai_detectionAiDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageai_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/anonymization/": {"post": {"operationId": "image_image_anonymization_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**api4ai**|`v1.0.0`|25.0 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Anonymization", "tags": ["Anonymization"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/create_project/": {"post": {"operationId": "image_image_automl_classification_create_project_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Automl Classification - Create Project", "tags": ["Automl Classification"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationcreate_projectAutomlClassificationCreateProjectRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationcreate_projectAutomlClassificationCreateProjectRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/delete_project/": {"post": {"operationId": "image_image_automl_classification_delete_project_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Automl Classification - Delete Project", "tags": ["Automl Classification"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationdelete_projectAutomlClassificationDeleteRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationdelete_projectAutomlClassificationDeleteRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/list_projects/": {"get": {"operationId": "image_image_automl_classification_list_projects_retrieve", "description": "List Automl Classification Projects", "summary": "Automl Classification - List Projects", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationListProjectsResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/predict_async/": {"get": {"operationId": "image_image_automl_classification_predict_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Automl Classification Predict List Job", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "image_image_automl_classification_predict_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|0.005 (per 1 request)|1 request\n\n\n
\n\n", "summary": "Automl Classification Predict Launch Job", "tags": ["Automl Classification"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AutomlClassificationPredictRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationPredictRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}}, "/image/automl_classification/predict_async/{public_id}/": {"get": {"operationId": "image_image_automl_classification_predict_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Automl Classification - Predict Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/train_async/": {"get": {"operationId": 
"image_image_automl_classification_train_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Automl Classification Train List Job", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "image_image_automl_classification_train_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Automl Classification Train Launch Job", "tags": ["Automl Classification"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AutomlClassificationTrainRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationTrainRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}}, "/image/automl_classification/train_async/{public_id}/": {"get": {"operationId": "image_image_automl_classification_train_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Automl Classification - Train Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/upload_data_async/": {"get": {"operationId": 
"image_image_automl_classification_upload_data_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Automl Classification Upload Data List Job", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "image_image_automl_classification_upload_data_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|0.0005 (per 1 file)|1 file\n\n\n
\n\n", "summary": "Automl Classification Upload Data Launch Job", "tags": ["Automl Classification"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AutomlClassificationUploadDataRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationUploadDataRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}}, "/image/automl_classification/upload_data_async/{public_id}/": {"get": {"operationId": "image_image_automl_classification_upload_data_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Automl Classification - Upload Data Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/background_removal/": {"post": {"operationId": 
"image_image_background_removal_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**api4ai**|`v1.0.0`|50.0 (per 1000 file)|1 file\n|**photoroom**|`v1`|20.0 (per 1000 file)|1 file\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**stabilityai**|`v2Beta`|0.02 (per 1 request)|1 request\n|**clipdrop**|`v1Beta`|0.5 (per 1 request)|1 request\n|**picsart**|`1.0`|0.04 (per 1 image)|1 image\n\n\n
\n\n", "summary": "Background Removal", "tags": ["Background Removal"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagebackground_removalResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/deepfake_detection/": {"post": {"operationId": "image_image_deepfake_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sightengine**|`v1beta`|0.002 (per 1 image)|1 image\n\n\n
\n\n", "summary": "Deepfake Detection", "tags": ["Deepfake Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagedeepfake_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/embeddings/": {"post": {"operationId": "image_image_embeddings_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`v1`|0.06 (per 1000 file)|1 file\n|**google**|**gemini-embedding-001**|`v1`|0.0 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Italian**|`it`|\n|**Spanish**|`es`|\n\n

Supported Models

\n\n", "summary": "Embeddings", "tags": ["Embeddings"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageembeddingsEmbeddingsRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageembeddingsEmbeddingsRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageembeddingsResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/explicit_content/": {"post": {"operationId": "image_image_explicit_content_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000 file)|1 file\n|**clarifai**|-|`8.0.0`|2.0 (per 1000 file)|1 file\n|**google**|-|`v1`|1.5 (per 1000 file)|1 file\n|**microsoft**|-|`v3.2`|1.0 (per 1000 file)|1 file\n|**sentisight**|-|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**openai**|**gpt-4o**|`v1`|24.0 (per 1000 file)|1 file\n|**openai**|-|`v1`|24.0 (per 1000 file)|1 file\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Explicit Content Detection", "tags": ["Explicit Content"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageexplicit_contentResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_compare/": {"post": {"operationId": "image_image_face_compare_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**base64**|`latest`|0.25 (per 1 request)|1 request\n|**facepp**|`v3`|2.0 (per 1000 request)|1 request\n|**amazon**|`boto3 (v1.15.18)`|1.0 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Comparison", "tags": ["Face Compare"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_compareFaceCompareRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_compareFaceCompareRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_compareResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_detection/": {"post": {"operationId": "image_image_face_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|1.0 (per 1000 file)|1 file\n|**clarifai**|`8.0.0`|2.0 (per 1000 file)|1 file\n|**google**|`v1`|1.5 (per 1000 file)|1 file\n|**api4ai**|`v1.0.0`|0.75 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Face Detection", "tags": ["Face Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_detectionFaceDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_detectionFaceDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/add_face/": {"post": {"operationId": "image_image_face_recognition_add_face_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|1.0 (per 1000 image)|1 image\n|**facepp**|`v3`|0.6 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - Add Face", "tags": ["Face Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionadd_faceFaceRecognitionAddFaceRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_recognitionadd_faceFaceRecognitionAddFaceRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/delete_face/": {"post": {"operationId": "image_image_face_recognition_delete_face_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|free|-\n|**facepp**|`v3`|0.1 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - Delete Face", "tags": ["Face Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitiondelete_faceFaceRecognitionDeleteFaceRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/list_faces/": {"get": {"operationId": "image_image_face_recognition_list_faces_retrieve", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|free|-\n|**facepp**|`v3`|0.1 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - List Faces", "parameters": [{"in": "query", "name": "attributes_as_list", "schema": {"type": "boolean", "default": false}, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, {"in": "query", "name": "fallback_providers", "schema": {"type": "array", "items": {"type": "string"}, "default": [], "maxItems": 5}, "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n "}, {"in": "query", "name": "providers", "schema": {"type": "array", "items": {"type": "string", "minLength": 1}}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)", "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, {"in": "query", "name": "settings", "schema": {"type": "string", "default": {}}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}], "tags": ["Face Recognition"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/recognize/": {"post": {"operationId": "image_image_face_recognition_recognize_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|1.0 (per 1000 file)|1 file\n|**facepp**|`v3`|2.0 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - Recognize Face", "tags": ["Face Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionrecognizeFaceRecognitionDetectFaceRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_recognitionrecognizeFaceRecognitionDetectFaceRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/generation/": {"post": {"operationId": "image_image_generation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Resolution|Price|Billing unit|\n|----|----|-------|------|-----|------------|\n|**openai**|**dall-e-3**|`v1Beta`|`1024x1024`|0.04 (per 1 image)|1 image\n|**openai**|**dall-e-3**|`v1Beta`|`512x512`|0.04 (per 1 image)|1 image\n|**openai**|**dall-e-2**|`v1Beta`|`256x256`|0.016 (per 1 image)|1 image\n|**openai**|**dall-e-2**|`v1Beta`|`512x512`|0.018 (per 1 image)|1 image\n|**openai**|**dall-e-2**|`v1Beta`|`1024x1024`|0.02 (per 1 image)|1 image\n|**openai**|**dall-e-3**|`v1Beta`|`1024x1792`|0.08 (per 1 image)|1 image\n|**openai**|**dall-e-3**|`v1Beta`|`1792x1024`|0.08 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`1024x1024`|0.04 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`512x512`|0.018 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`1024x1792`|0.08 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`1792x1024`|0.08 (per 1 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1024x1024`|15.0 (per 1000 image)|1 image\n|**stabilityai**|-|`v1Beta`|`1024x1024`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1152x896`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`896x1152`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1216x832`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1344x768`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`768x1344`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1536x640`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`640x1536`|15.0 (per 1000 image)|1 image\n|**replicate**|**anime-style**|`v1`|`1024x1024`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**anime-style**|`v1`|`256x256`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|-|`v1`|-|0.000225 (per 1 
exec_time)|1 exec_time\n|**replicate**|**classic**|`v1`|`512x512`|0.00115 (per 1 exec_time)|1 exec_time\n|**replicate**|**anime-style**|`v1`|`512x512`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**vintedois-diffusion**|`v1`|`512x512`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**vintedois-diffusion**|`v1`|`1024x1024`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**vintedois-diffusion**|`v1`|`256x256`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**classic**|`v1`|`1024x1024`|0.00115 (per 1 exec_time)|1 exec_time\n|**replicate**|**classic**|`v1`|`256x256`|0.00115 (per 1 exec_time)|1 exec_time\n|**leonardo**|**Leonardo Phoenix**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**Leonardo Phoenix**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Lightning XL**|`v1`|`512x512`|0.011 (per 1 image)|1 image\n|**leonardo**|**Leonardo Lightning XL**|`v1`|`1024x1024`|0.012 (per 1 image)|1 image\n|**leonardo**|**Leonardo Anime XL**|`v1`|`512x512`|0.011 (per 1 image)|1 image\n|**leonardo**|**Leonardo Anime XL**|`v1`|`1024x1024`|0.012 (per 1 image)|1 image\n|**leonardo**|**Leonardo Kino XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Kino XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**Leonardo Vision XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Vision XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**Leonardo Diffusion XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Diffusion XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**AlbedoBase XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**AlbedoBase XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**SDXL 0.9**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**SDXL 0.9**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|-|`v1`|`512x512`|0.014 (per 1 image)|1 
image\n|**leonardo**|-|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**minimax**|**image-01**|`v1`|-|0.0035 (per 1 image)|1 image\n|**minimax**|-|`v1`|-|0.0035 (per 1 image)|1 image\n|**bytedance**|**seedream-5-0-260128**|`v3`|-|0.035 (per 1 token)|1 token\n|**bytedance**|**seedream-4-0-250828**|`v3`|-|0.03 (per 1 request)|1 request\n|**bytedance**|**seedream-4-5-251128**|`v3`|-|0.03 (per 1 request)|1 request\n|**bytedance**|-|`v3`|-|0.03 (per 1 request)|1 request\n|**bytedance**|**seedream-3-0-t2i-250415**|`v3`|-|0.03 (per 1 request)|1 request\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`dall-e-3`|\n|**stabilityai**|`stable-diffusion-xl-1024-v1-0`|\n|**replicate**|`classic`|\n|**leonardo**|`Leonardo Phoenix`|\n|**minimax**|`image-01`|\n|**bytedance**|`seedream-3-0-t2i-250415`|\n\n
", "summary": "Image generation", "tags": ["Generation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagegenerationGenerationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagegenerationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/landmark_detection/": {"post": {"operationId": "image_image_landmark_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`v1`|1.5 (per 1000 file)|1 file\n|**microsoft**|`v3.2`|1.0 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Landmark Detection", "tags": ["Landmark Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagelandmark_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/logo_detection/": {"post": {"operationId": "image_image_logo_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|-|`v1`|1.5 (per 1000 file)|1 file\n|**microsoft**|-|`v3.2`|1.0 (per 1000 file)|1 file\n|**api4ai**|-|`v1.0.0`|0.25 (per 1000 file)|1 file\n|**api4ai**|**v1**|`v1.0.0`|0.25 (per 1000 file)|1 file\n|**api4ai**|**v2**|`v1.0.0`|2.5 (per 1000 file)|1 file\n|**clarifai**|-|`8.0.0`|2.0 (per 1000 file)|1 file\n|**openai**|-|`v1`|24.0 (per 1000 file)|1 file\n|**openai**|**gpt-4o**|`v1`|24.0 (per 1000 file)|1 file\n|**openai**|**gpt-4-turbo**|`v1`|48.0 (per 1000 file)|1 file\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**api4ai**|`v1`|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Logo Detection", "tags": ["Logo Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagelogo_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/object_detection/": {"post": {"operationId": "image_image_object_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000 file)|1 file\n|**api4ai**|-|`1.9.2`|0.5 (per 1000 file)|1 file\n|**clarifai**|-|`8.0.0`|2.0 (per 1000 file)|1 file\n|**clarifai**|**general-image-detection**|`8.0.0`|2.0 (per 1000 file)|1 file\n|**google**|-|`v1`|2.25 (per 1000 file)|1 file\n|**microsoft**|-|`v3.2`|1.0 (per 1000 file)|1 file\n|**sentisight**|-|`v3.3.1`|0.75 (per 1000 file)|1 file\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**clarifai**|`general-image-detection`|\n\n
", "summary": "Object Detection", "tags": ["Object Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageobject_detectionObjectDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageobject_detectionObjectDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageobject_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/question_answer/": {"post": {"operationId": "image_image_question_answer_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|**gpt-4-turbo**|`v1`|30.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1`|8.0 (per 1000000 token)|1 token\n|**openai**|**gpt-5**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**o1**|`v1`|60.0 (per 1000000 token)|1 token\n|**openai**|**o1-mini**|`v1`|12.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|0.6 (per 1000000 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Italian**|`it`|\n|**Spanish**|`es`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**google**|`gemini-1.5-pro`|\n\n
", "summary": "Question Answer", "tags": ["Question Answer"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagequestion_answerResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/delete_image/": {"post": {"operationId": "image_image_search_delete_image_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|free|-\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Search - Delete phase", "tags": ["Search"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchdelete_imageDeleteImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagesearchdelete_imageDeleteImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/get_image/": {"get": {"operationId": "image_image_search_get_image_retrieve", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|free|-\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Search - get image", "parameters": [{"in": "query", "name": "attributes_as_list", "schema": {"type": "boolean", "default": false}, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, {"in": "query", "name": "fallback_providers", "schema": {"type": "array", "items": {"type": "string"}, "default": [], "maxItems": 5}, "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n "}, {"in": "query", "name": "image_name", "schema": {"type": "string", "minLength": 1}, "required": true}, {"in": "query", "name": "providers", "schema": {"type": "array", "items": {"type": "string", "minLength": 1}}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)", "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, {"in": "query", "name": "settings", "schema": {"type": "string", "default": {}}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}], "tags": ["Search"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/get_images/": {"get": {"operationId": "image_image_search_get_images_retrieve", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|free|-\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Search - list all images", "parameters": [{"in": "query", "name": "attributes_as_list", "schema": {"type": "boolean", "default": false}, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, {"in": "query", "name": "fallback_providers", "schema": {"type": "array", "items": {"type": "string"}, "default": [], "maxItems": 5}, "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n "}, {"in": "query", "name": "providers", "schema": {"type": "array", "items": {"type": "string", "minLength": 1}}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)", "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, {"in": "query", "name": "settings", "schema": {"type": "string", "default": {}}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}], "tags": ["Search"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/launch_similarity/": {"post": {"operationId": "image_image_search_launch_similarity_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**nyckel**|`v1.0.0`|1.0 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Search - launch similarity", "tags": ["Search"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchlaunch_similaritySearchImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagesearchlaunch_similaritySearchImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/upload_image/": {"post": {"operationId": "image_image_search_upload_image_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**nyckel**|`v1.0.0`|0.5 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Search - Upload Phase", "tags": ["Search"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchupload_imageUploadImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagesearchupload_imageUploadImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/info/provider_subfeatures/": {"get": {"operationId": "info_info_provider_subfeatures_list", "description": "List Provider and features relations : You can get a list of *all providers* for a *feature* or *all features* for a *given provider*.\n\nYou can have the detailed information on the **pricing**, **supported languages** as well as the **models** for providers who propose different models (eg: voice models available for a text to speech provider).\n\nExample : If you want the detailed list of all providers that can analyse the sentiment of a text written in french, you'd need to set `feature__name=text`, `subfeature__name=sentiment_analysis` and `languages=fr`.\n\nWhich will look like the following :\n\n\n```bash\ncurl --request GET https://api.edenai.run/v2/info/provider_subfeatures?subfeature__name=sentiment_analysis&feature__name=text&languages=fr\n```", "summary": "List Providers Subfeatures", "parameters": [{"in": "query", "name": "feature__name", "schema": {"type": "string"}}, {"in": 
"query", "name": "gender", "schema": {"type": "string"}, "description": "Accepts two values: either 'male' or 'female'. Used to filter models voices for the text_to_speech subfeature"}, {"in": "query", "name": "is_working", "schema": {"type": "boolean"}}, {"in": "query", "name": "language", "schema": {"type": "string"}, "description": "languages [icontains]"}, {"in": "query", "name": "phase__name", "schema": {"type": "string"}}, {"in": "query", "name": "provider__name", "schema": {"type": "string"}}, {"in": "query", "name": "subfeature__name", "schema": {"type": "string"}}], "tags": ["Infos"], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/ProviderSubfeature"}}}}, "description": ""}}}}, "/llm/chat/": {"post": {"operationId": "llm_llm_chat_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|**amazon.nova-lite-v1:0**|`llmengine (v2)`|0.24 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-micro-v1:0**|`llmengine (v2)`|0.14 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`llmengine (v2)`|3.2 (per 1000000 token)|1 token\n|**anthropic**|**claude-opus-4-6**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-20250514**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1-20250805**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5**|`v1`|2.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-20241022**|`v1`|4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-latest**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5-20251001**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20250219**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-haiku-20240307**|`v1`|1.25e-06 (per 1 token)|1 token\n|**anthropic**|**claude-4-opus-20250514**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-4-sonnet-20250514**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5-20250929**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-20250514**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5-20251101**|`v1`|2.5e-05 (per 1 token)|1 token\n|**cohere**|**command-r7b-12-2024**|`llmengine (v2)`|0.15 (per 1000000 token)|1 token\n|**cohere**|**command-r-08-2024**|`llmengine (v2)`|0.6 (per 1000000 token)|1 token\n|**deepseek**|**deepseek-chat**|`llmengine (v2)`|1.1e-06 (per 
1 token)|1 token\n|**deepseek**|**deepseek-reasoner**|`llmengine (v2)`|2.19e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-coder**|`llmengine (v2)`|2.8e-07 (per 1 token)|1 token\n|**meta**|**meta.llama3-1-405b-instruct-v1:0**|`llmengine (v2)`|2.4 (per 1000000 token)|1 token\n|**meta**|**meta.llama3-1-70b-instruct-v1:0**|`llmengine (v2)`|0.72 (per 1000000 token)|1 token\n|**meta**|**meta.llama3-1-8b-instruct-v1:0**|`llmengine (v2)`|0.22 (per 1000000 token)|1 token\n|**mistral**|**magistral-medium-2506**|`llmengine (v2)`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-2506**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-latest**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**pixtral-large-latest**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-small-latest**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**codestral-latest**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-latest**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2405**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2508**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**mistral**|**devstral-medium-2507**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**devstral-small-2505**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**devstral-small-2507**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**labs-devstral-small-2512**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**devstral-2512**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-2509**|`llmengine (v2)`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-latest**|`llmengine (v2)`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-latest**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2402**|`llmengine (v2)`|1.2e-05 (per 1 token)|1 
token\n|**mistral**|**mistral-large-2407**|`llmengine (v2)`|9e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2411**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium**|`llmengine (v2)`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2312**|`llmengine (v2)`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2505**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**mistral-small**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**mistral-tiny**|`llmengine (v2)`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-7b**|`llmengine (v2)`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo-2407**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x22b**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x7b**|`llmengine (v2)`|7e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-12b-2409**|`llmengine (v2)`|1.5e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-large-2411**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`llmengine (v2)`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-pro**|`llmengine (v2)`|100.0 (per 1000000 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3**|`llmengine (v2)`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**o3-mini**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`llmengine (v2)`|1.4e-05 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`llmengine (v2)`|1.6e-06 (per 1 
token)|1 token\n|**openai**|**gpt-4.1-nano**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`llmengine (v2)`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`llmengine (v2)`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-72B-Instruct-Turbo**|`llmengine (v2)`|1.2e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`llmengine (v2)`|1.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-72B-Instruct-Turbo**|`llmengine (v2)`|1.2 (per 1000000 token)|1 token\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-120b**|`llmengine 
(v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Thinking-2507**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-fp8-tput**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1**|`llmengine (v2)`|7e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1-0528-tput**|`llmengine (v2)`|2.19e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3**|`llmengine (v2)`|1.25e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3.1**|`llmengine (v2)`|1.7e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8**|`llmengine (v2)`|8.5e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-4-Scout-17B-16E-Instruct**|`llmengine (v2)`|5.9e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo**|`llmengine (v2)`|3.5e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`llmengine (v2)`|1.8e-07 (per 1 token)|1 token\n|**together_ai**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-20b**|`llmengine (v2)`|2e-07 (per 1 token)|1 token\n|**together_ai**|**zai-org/GLM-4.5-Air-FP8**|`llmengine (v2)`|1.1e-06 (per 1 token)|1 token\n|**together_ai**|**zai-org/GLM-4.6**|`llmengine (v2)`|2.2e-06 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct-0905**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Instruct**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 
token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Thinking**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Instruct-2507-tput**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo**|`llmengine (v2)`|3.5e-06 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`llmengine (v2)`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`llmengine (v2)`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`llmengine (v2)`|5e-07 (per 1 
token)|1 token\n|**xai**|**grok-4-latest**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`llmengine (v2)`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-image-preview**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-3.1-pro-preview**|`llmengine (v2)`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-3-pro-image-preview**|`llmengine (v2)`|12.0 (per 1000000 token)|1 token\n|**google**|**gemini-3-flash-preview**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`llmengine (v2)`|4e-07 (per 1 token)|1 
token\n|**google**|**gemini-2.5-pro**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**google**|**gemma-3-27b-it**|`llmengine (v2)`|0.0 (per 1 token)|1 token\n|**groq**|**llama-3.1-8b-instant**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-120b**|`v1`|7.5e-07 (per 1 token)|1 token\n|**groq**|**llama-3.3-70b-versatile**|`v1`|7.9e-07 (per 1 token)|1 token\n|**groq**|**llama-3.3-70b-versatile**|`v1`|7.9e-07 (per 1 token)|1 token\n|**groq**|**llama-3.1-8b-instant**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-guard-4-12b**|`v1`|2e-07 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-4-maverick-17b-128e-instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-4-scout-17b-16e-instruct**|`v1`|3.4e-07 (per 1 token)|1 token\n|**groq**|**moonshotai/kimi-k2-instruct-0905**|`v1`|3e-06 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-20b**|`v1`|5e-07 (per 1 token)|1 token\n|**groq**|**qwen/qwen3-32b**|`v1`|5.9e-07 (per 1 token)|1 token\n|**microsoft**|**gpt-4o**|`Azure AI Foundry`|5.0 (per 1000000 token)|1 token\n|**microsoft**|**o3-mini**|`Azure AI Foundry`|4.4 (per 1000000 token)|1 token\n|**microsoft**|**o1-mini**|`Azure AI Foundry`|12.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-4o-mini**|`Azure AI Foundry`|0.66 (per 1000000 token)|1 token\n|**microsoft**|**gpt-4**|`Azure AI Foundry`|60.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-35-turbo-16k**|`Azure AI Foundry`|4.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-35-turbo**|`Azure AI Foundry`|1.5 (per 1000000 token)|1 token\n|**minimax**|**minimax-m1**|`v1`|2.2 (per 1000000 token)|1 token\n|**minimax**|**minimax-text-01**|`v1`|1.1 (per 1000000 token)|1 token\n|**minimax**|**MiniMax-M2.1**|`v1`|1.2e-06 (per 1 token)|1 token\n|**minimax**|**MiniMax-M2.1-lightning**|`v1`|2.4e-06 (per 1 token)|1 token\n|**minimax**|**MiniMax-M2**|`v1`|1.2e-06 (per 1 token)|1 token\n|**bytedance**|**seed-1-6-250915**|`llmengine (v2)`|2.0 (per 1000000 token)|1 
token\n|**perplexityai**|**sonar**|`llmengine (v2)`|1.0 (per 1000000 token)|1 token\n|**perplexityai**|**sonar**|`llmengine (v2)`|1e-06 (per 1 token)|1 token\n|**perplexityai**|**sonar-pro**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**perplexityai**|**sonar-deep-research**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**perplexityai**|**sonar-pro**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**perplexityai**|**sonar-reasoning-pro**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**deepinfra**|**Gryphe/MythoMax-L2-13b**|`v1`|9e-08 (per 1 token)|1 token\n|**deepinfra**|**NousResearch/Hermes-3-Llama-3.1-405B**|`v1`|1e-06 (per 1 token)|1 token\n|**deepinfra**|**NousResearch/Hermes-3-Llama-3.1-70B**|`v1`|3e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/QwQ-32B**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen2.5-72B-Instruct**|`v1`|3.9e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen2.5-7B-Instruct**|`v1`|1e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen2.5-VL-32B-Instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-14B**|`v1`|2.4e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-235B-A22B**|`v1`|5.4e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-235B-A22B-Instruct-2507**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-235B-A22B-Thinking-2507**|`v1`|2.9e-06 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-30B-A3B**|`v1`|2.9e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-32B**|`v1`|2.8e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-Coder-480B-A35B-Instruct**|`v1`|1.6e-06 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo**|`v1`|1.2e-06 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-Next-80B-A3B-Instruct**|`v1`|1.4e-06 (per 1 token)|1 token\n|**deepinfra**|**Sao10K/L3-8B-Lunaris-v1-Turbo**|`v1`|5e-08 (per 1 token)|1 token\n|**deepinfra**|**Sao10K/L3.1-70B-Euryale-v2.2**|`v1`|7.5e-07 (per 1 token)|1 
token\n|**deepinfra**|**Sao10K/L3.3-70B-Euryale-v2.3**|`v1`|7.5e-07 (per 1 token)|1 token\n|**deepinfra**|**anthropic/claude-3-7-sonnet-latest**|`v1`|1.65e-05 (per 1 token)|1 token\n|**deepinfra**|**anthropic/claude-4-opus**|`v1`|8.25e-05 (per 1 token)|1 token\n|**deepinfra**|**anthropic/claude-4-sonnet**|`v1`|1.65e-05 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1**|`v1`|2.4e-06 (per 1 token)|1 token\n|**deepinfra**|**nvidia/Llama-3.3-Nemotron-Super-49B-v1.5**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-0528**|`v1`|2.15e-06 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-Distill-Llama-70B**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-Distill-Qwen-32B**|`v1`|2.7e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-Turbo**|`v1`|3e-06 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3**|`v1`|8.9e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3-0324**|`v1`|8.8e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3.1**|`v1`|1e-06 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3.1-Terminus**|`v1`|1e-06 (per 1 token)|1 token\n|**deepinfra**|**google/gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**google/gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**deepinfra**|**google/gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**deepinfra**|**google/gemma-3-12b-it**|`v1`|1e-07 (per 1 token)|1 token\n|**deepinfra**|**google/gemma-3-27b-it**|`v1`|1.6e-07 (per 1 token)|1 token\n|**deepinfra**|**google/gemma-3-4b-it**|`v1`|8e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-3.2-11B-Vision-Instruct**|`v1`|4.9e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-3.2-3B-Instruct**|`v1`|2e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-3.3-70B-Instruct**|`v1`|4e-07 (per 1 token)|1 
token\n|**deepinfra**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`v1`|3.9e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-4-Scout-17B-16E-Instruct**|`v1`|3e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-Guard-3-8B**|`v1`|5.5e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-Guard-4-12B**|`v1`|1.8e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3-8B-Instruct**|`v1`|6e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-70B-Instruct**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`v1`|2.8e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-8B-Instruct**|`v1`|5e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`v1`|3e-08 (per 1 token)|1 token\n|**deepinfra**|**microsoft/WizardLM-2-8x22B**|`v1`|4.8e-07 (per 1 token)|1 token\n|**deepinfra**|**microsoft/phi-4**|`v1`|1.4e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-Nemo-Instruct-2407**|`v1`|4e-08 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-Small-24B-Instruct-2501**|`v1`|8e-08 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-Small-3.2-24B-Instruct-2506**|`v1`|2e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**moonshotai/Kimi-K2-Instruct**|`v1`|2e-06 (per 1 token)|1 token\n|**deepinfra**|**moonshotai/Kimi-K2-Instruct-0905**|`v1`|2e-06 (per 1 token)|1 token\n|**deepinfra**|**nvidia/Llama-3.1-Nemotron-70B-Instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**nvidia/NVIDIA-Nemotron-Nano-9B-v2**|`v1`|1.6e-07 (per 1 token)|1 token\n|**deepinfra**|**openai/gpt-oss-120b**|`v1`|4.5e-07 (per 1 token)|1 token\n|**deepinfra**|**openai/gpt-oss-20b**|`v1`|1.5e-07 (per 1 token)|1 
token\n|**deepinfra**|**zai-org/GLM-4.5**|`v1`|1.6e-06 (per 1 token)|1 token\n|**deepinfra**|**lizpreciatior/lzlv_70b_fp16_hf**|`v1`|9e-07 (per 1 token)|1 token\n|**deepinfra**|**Gryphe/MythoMax-L2-13b**|`v1`|2.2e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-7B-Instruct-v0.1**|`v1`|1.3e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-2-70b-chat-hf**|`v1`|9e-07 (per 1 token)|1 token\n|**deepinfra**|**cognitivecomputations/dolphin-2.6-mixtral-8x7b**|`v1`|2.7e-07 (per 1 token)|1 token\n|**deepinfra**|**Phind/Phind-CodeLlama-34B-v2**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`v1`|2.7e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-2-13b-chat-hf**|`v1`|2.2e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3-8B-Instruct**|`v1`|8e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3-70B-Instruct**|`v1`|7.9e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-405B-Instruct**|`v1`|9e-07 (per 1 token)|1 token\n|**deepinfra**|**openchat/openchat_3.5**|`v1`|1.3e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-0528-Turbo**|`v1`|3e-06 (per 1 token)|1 token\n|**cerebras**|**llama3.1-8b**|`llmengine (v2)`|1e-07 (per 1 token)|1 token\n|**cerebras**|**gpt-oss-120b**|`llmengine (v2)`|6.9e-07 (per 1 token)|1 token\n|**cloudflare**|**@cf/meta/llama-2-7b-chat-fp16**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**cloudflare**|**@cf/meta/llama-2-7b-chat-int8**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**cloudflare**|**@cf/mistral/mistral-7b-instruct-v0.1**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**cloudflare**|**@hf/thebloke/codellama-7b-instruct-awq**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**databricks**|**databricks-claude-3-7-sonnet**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-haiku-4-5**|`llmengine (v2)`|5e-06 (per 1 token)|1 
token\n|**databricks**|**databricks-claude-opus-4-1**|`llmengine (v2)`|7.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-opus-4-5**|`llmengine (v2)`|2.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-sonnet-4**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-sonnet-4-5**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gemini-2-5-flash**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**databricks**|**databricks-gemini-2-5-pro**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gemma-3-12b**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5-1**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5-mini**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5-nano**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-oss-120b**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-oss-20b**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**databricks**|**databricks-llama-4-maverick**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**databricks**|**databricks-meta-llama-3-1-405b-instruct**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-meta-llama-3-1-8b-instruct**|`llmengine (v2)`|4.5e-07 (per 1 token)|1 token\n|**databricks**|**databricks-meta-llama-3-3-70b-instruct**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen2p5-vl-32b-instruct**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-235b-a22b**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-235b-a22b-instruct-2507**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 
token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-235b-a22b-thinking-2507**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-8b**|`llmengine (v2)`|2e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-235b-a22b-instruct**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-235b-a22b-thinking**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-30b-a3b-instruct**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-30b-a3b-thinking**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-r1-0528**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3-0324**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3p1**|`llmengine (v2)`|1.68e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3p1-terminus**|`llmengine (v2)`|1.68e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3p2**|`llmengine (v2)`|1.68e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/glm-4p6**|`llmengine (v2)`|2.19e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/gpt-oss-120b**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/gpt-oss-20b**|`llmengine (v2)`|2e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/kimi-k2-instruct-0905**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/kimi-k2-thinking**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-coder-480b-a35b-instruct**|`llmengine (v2)`|1.8e-06 (per 1 token)|1 
token\n|**fireworks_ai**|**accounts/fireworks/models/llama-v3p3-70b-instruct**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/minimax-m2**|`llmengine (v2)`|1.2e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/mixtral-8x22b-instruct**|`llmengine (v2)`|1.2e-06 (per 1 token)|1 token\n|**ovhcloud**|**DeepSeek-R1-Distill-Llama-70B**|`llmengine (v2)`|6.7e-07 (per 1 token)|1 token\n|**ovhcloud**|**Llama-3.1-8B-Instruct**|`llmengine (v2)`|1e-07 (per 1 token)|1 token\n|**ovhcloud**|**Meta-Llama-3_3-70B-Instruct**|`llmengine (v2)`|6.7e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mistral-7B-Instruct-v0.3**|`llmengine (v2)`|1e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mistral-Nemo-Instruct-2407**|`llmengine (v2)`|1.3e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mistral-Small-3.2-24B-Instruct-2506**|`llmengine (v2)`|2.8e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mixtral-8x7B-Instruct-v0.1**|`llmengine (v2)`|6.3e-07 (per 1 token)|1 token\n|**ovhcloud**|**Qwen2.5-Coder-32B-Instruct**|`llmengine (v2)`|8.7e-07 (per 1 token)|1 token\n|**ovhcloud**|**Qwen2.5-VL-72B-Instruct**|`llmengine (v2)`|9.1e-07 (per 1 token)|1 token\n|**ovhcloud**|**Qwen3-32B**|`llmengine (v2)`|2.3e-07 (per 1 token)|1 token\n|**ovhcloud**|**gpt-oss-120b**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**ovhcloud**|**gpt-oss-20b**|`llmengine (v2)`|1.5e-07 (per 1 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**amazon**|`amazon.nova-pro-v1:0`|\n|**anthropic**|`claude-3-7-sonnet-latest`|\n|**cohere**|`command-r`|\n|**deepseek**|`deepseek-chat`|\n|**meta**|`meta.llama3-1-70b-instruct-v1:0`|\n|**mistral**|`mistral-large-latest`|\n|**openai**|`gpt-4o`|\n|**together_ai**|`Qwen/Qwen2.5-72B-Instruct-Turbo`|\n|**xai**|`grok-2-latest`|\n|**google**|`gemini-2.0-flash`|\n|**groq**|`llama-3.3-70b-versatile`|\n|**microsoft**|`gpt-4o`|\n|**minimax**|`MiniMax-M1`|\n|**bytedance**|`seed-1-6-250915`|\n|**perplexityai**|`sonar`|\n|**deepinfra**|`nvidia/Llama-3.3-Nemotron-Super-49B-v1.5`|\n|**cerebras**|`gpt-oss-120b`|\n\n
", "summary": "Chat", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/llmchatllmchatChatRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/llmchatResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/multimodal/chat/": {"post": {"operationId": "multimodal_multimodal_chat_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**anthropic**|**claude-3-5-haiku-latest**|`bedrock-2023-05-31`|4.0 (per 1000000 token)|1 token\n|**anthropic**|-|`bedrock-2023-05-31`|15.0 (per 1000000 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20250219**|`bedrock-2023-05-31`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|-|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|0.3 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|18.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1000000 token)|1 token\n|**openai**|**o1**|`v1Beta`|60.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4-turbo**|`v1Beta`|30.0 (per 1000000 token)|1 token\n|**openai**|-|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1Beta`|8.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v1Beta`|1.6 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1Beta`|0.6 (per 1000000 token)|1 token\n|**openai**|**gpt-5.2**|`v1Beta`|14.0 (per 1000000 token)|1 
token\n|**openai**|**gpt-5**|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-5-mini**|`v1Beta`|2.0 (per 1000000 token)|1 token\n|**openai**|**gpt-5-nano**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**o1-mini**|`v1Beta`|12.0 (per 1000000 token)|1 token\n|**openai**|**o1-2024-12-17**|`v1Beta`|60.0 (per 1000000 token)|1 token\n|**xai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|10.0 (per 1000000 token)|1 token\n|**amazon**|-|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**mistral**|-|`v0.0.1`|6.0 (per 1000000 token)|1 token\n|**mistral**|**pixtral-large-latest**|`v0.0.1`|6.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-4o**|`Azure AI Foundry`|15.0 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**anthropic**|`claude-3-5-sonnet-latest`|\n|**google**|`gemini-2.0-flash`|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-vision-latest`|\n|**amazon**|`amazon.nova-pro-v1:0`|\n|**mistral**|`pixtral-large-latest`|\n|**microsoft**|`gpt-4o`|\n\n
", "summary": "Chat", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/multimodalchatChatRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/multimodalchatResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/anonymization_async/": {"get": {"operationId": "ocr_ocr_anonymization_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a GET request** is sent.", "summary": "Anonymization List Job", "tags": ["Anonymization Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_anonymization_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**readyredact**|`v1`|0.05 (per 1 file)|1 file\n|**base64**|`v1`|0.25 (per 1 page)|1 page\n|**privateai**|`v3`|0.01 (per 1 page)|1 page\n\n\n
\n\n", "summary": "Anonymization Launch Job", "tags": ["Anonymization Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AnonymizationAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AnonymizationAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_anonymization_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Anonymization delete Jobs", "tags": ["Anonymization Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/anonymization_async/{public_id}/": {"get": {"operationId": "ocr_ocr_anonymization_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Anonymization Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Anonymization Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocranonymization_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/bank_check_parsing/": {"post": {"operationId": "ocr_ocr_bank_check_parsing_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**base64**|`latest`|0.25 (per 1 page)|1 page\n|**veryfi**|`v8`|0.16 (per 1 request)|1 request\n|**mindee**|`v1`|0.1 (per 1 page)|1 page\n|**extracta**|`v1`|0.1 (per 1 page)|1 page\n\n\n
\n\n", "summary": "Bank Check Parsing", "tags": ["Bank Check Parsing"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrbank_check_parsingResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/custom_document_parsing_async/": {"get": {"operationId": "ocr_ocr_custom_document_parsing_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a GET request** is sent.", "summary": "Custom Document Parsing List Job", "tags": ["Custom Document Parsing Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_custom_document_parsing_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|15.0 (per 1000 page)|1 page\n|**extracta**|`v1`|0.1 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Italian**|`it`|\n|**Portuguese**|`pt`|\n|**Spanish**|`es`|\n\n
", "summary": "Custom Document Parsing Launch Job", "tags": ["Custom Document Parsing Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_custom_document_parsing_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Custom Document Parsing delete Jobs", "tags": ["Custom Document Parsing Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/custom_document_parsing_async/{public_id}/": {"get": {"operationId": "ocr_ocr_custom_document_parsing_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Custom Document Parsing Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Custom Document Parsing Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocrcustom_document_parsing_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/data_extraction/": {"post": {"operationId": "ocr_ocr_data_extraction_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|0.05 (per 1 page)|1 page\n|**base64**|`latest`|0.25 (per 1 page)|1 page\n\n\n
\n\n", "summary": "Data Extraction", "tags": ["Data Extraction"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrdata_extractionDataExtractionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrdata_extractionDataExtractionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrdata_extractionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/financial_parser/": {"post": {"operationId": "ocr_ocr_financial_parser_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Document Type|Price|Billing unit|\n|----|----|-------|------|-----|------------|\n|**affinda**|-|`v3`|`invoice`|0.08 (per 1 page)|1 page\n|**affinda**|-|`v3`|`receipt`|0.07 (per 1 page)|1 page\n|**amazon**|-|`boto3 1.26.8`|-|0.01 (per 1 page)|1 page\n|**base64**|-|`latest`|-|0.25 (per 1 page)|1 page\n|**google**|-|`DocumentAI v1 beta3`|`invoice`|0.01 (per 1 page)|10 page\n|**google**|-|`DocumentAI v1 beta3`|`receipt`|0.01 (per 1 page)|10 page\n|**klippa**|-|`v1`|-|0.1 (per 1 file)|1 file\n|**microsoft**|-|`rest API 4.0 (2024-02-29-preview)`|-|0.01 (per 1 page)|1 page\n|**mindee**|-|`v1.2`|-|0.1 (per 1 page)|1 page\n|**tabscanner**|-|`latest`|-|0.08 (per 1 page)|1 page\n|**veryfi**|-|`v8`|`receipt`|0.08 (per 1 file)|1 file\n|**veryfi**|-|`v8`|`invoice`|0.16 (per 1 file)|1 file\n|**eagledoc**|-|`v1`|-|0.03 (per 1 page)|1 page\n|**extracta**|-|`v1`|-|0.1 (per 1 page)|1 page\n|**openai**|**gpt-4o**|`v1.0`|-|0.04 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-cn`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Taiwan)**|`zh-tw`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**French (Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Italian (Italy)**|`it-IT`|\n|**Italian (Switzerland)**|`it-CH`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Spanish (Spain)**|`es-ES`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Financial Parser", "tags": ["Financial Parser"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrfinancial_parserResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/identity_parser/": {"post": {"operationId": "ocr_ocr_identity_parser_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|0.025 (per 1 page)|1 page\n|**base64**|-|`latest`|0.2 (per 1 page)|1 page\n|**microsoft**|-|`rest API 4.0 (2024-02-29-preview)`|0.01 (per 1 page)|1 page\n|**mindee**|-|`v2`|0.1 (per 1 page)|1 page\n|**klippa**|-|`v1`|0.1 (per 1 file)|1 file\n|**affinda**|-|`v3`|0.07 (per 1 file)|1 file\n|**openai**|**gpt-4o**|`v1`|0.02 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Arabic**|`ar`|\n|**Bengali**|`bn`|\n|**Bulgarian**|`bg`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Kannada**|`kn`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Macedonian**|`mk`|\n|**Malayalam**|`ml`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Vietnamese**|`vi`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-cn`|\n|**Chinese (Taiwan)**|`zh-tw`|\n|**English (United States)**|`en-US`|\n|**French (France)**|`fr-FR`|\n|**German (Germany)**|`de-DE`|\n|**Italian (Italy)**|`it-IT`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Spanish (Spain)**|`es-ES`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Identity Parser", "tags": ["Identity Parser"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocridentity_parserIdentityParserRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocridentity_parserIdentityParserRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocridentity_parserResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/ocr/": {"post": {"operationId": "ocr_ocr_ocr_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|1.5 (per 1000 page)|1 page\n|**google**|`v1`|1.5 (per 1000 page)|1 page\n|**microsoft**|`v3.2`|1.0 (per 1000 page)|1 page\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**api4ai**|`v1.0.0`|3.0 (per 1000 request)|1 request\n|**mistral**|`v1`|1.0 (per 1000 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Abaza**|`abq`|\n|**Adyghe**|`ady`|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Angika**|`anp`|\n|**Arabic**|`ar`|\n|**Assamese**|`as`|\n|**Asturian**|`ast`|\n|**Avaric**|`av`|\n|**Awadhi**|`awa`|\n|**Azerbaijani**|`az`|\n|**Bagheli**|`bfy`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bhojpuri**|`bho`|\n|**Bihari languages**|`bh`|\n|**Bislama**|`bi`|\n|**Bodo (India)**|`brx`|\n|**Bosnian**|`bs`|\n|**Braj**|`bra`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Bundeli**|`bns`|\n|**Buriat**|`bua`|\n|**Camling**|`rab`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chamorro**|`ch`|\n|**Chechen**|`ce`|\n|**Chhattisgarhi**|`hne`|\n|**Chinese**|`zh`|\n|**Cornish**|`kw`|\n|**Corsican**|`co`|\n|**Crimean Tatar**|`crh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dargwa**|`dar`|\n|**Dari**|`prs`|\n|**Dhimal**|`dhi`|\n|**Dogri (macrolanguage)**|`doi`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Erzya**|`myv`|\n|**Estonian**|`et`|\n|**Faroese**|`fo`|\n|**Fijian**|`fj`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Friulian**|`fur`|\n|**Gagauz**|`gag`|\n|**Galician**|`gl`|\n|**German**|`de`|\n|**Gilbertese**|`gil`|\n|**Goan Konkani**|`gom`|\n|**Gondi**|`gon`|\n|**Gurung**|`gvr`|\n|**Haitian**|`ht`|\n|**Halbi**|`hlb`|\n|**Hani**|`hni`|\n|**Haryanvi**|`bgc`|\n|**Hawaiian**|`haw`|\n|**Hindi**|`hi`|\n|**Hmong Daw**|`mww`|\n|**Ho**|`hoc`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Inari Sami**|`smn`|\n|**Indonesian**|`id`|\n|**Ingush**|`inh`|\n|**Interlingua (International Auxiliary Language 
Association)**|`ia`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Jaunsari**|`jns`|\n|**Javanese**|`jv`|\n|**K'iche'**|`quc`|\n|**Kabardian**|`kbd`|\n|**Kabuverdianu**|`kea`|\n|**Kachin**|`kac`|\n|**Kalaallisut**|`kl`|\n|**Kangri**|`xnr`|\n|**Kara-Kalpak**|`kaa`|\n|**Karachay-Balkar**|`krc`|\n|**Kashubian**|`csb`|\n|**Kazakh**|`kk`|\n|**Khaling**|`klr`|\n|**Khasi**|`kha`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Korku**|`kfq`|\n|**Koryak**|`kpy`|\n|**Kosraean**|`kos`|\n|**Kumarbhag Paharia**|`kmj`|\n|**Kumyk**|`kum`|\n|**Kurdish**|`ku`|\n|**Kurukh**|`kru`|\n|**K\u00f6lsch**|`ksh`|\n|**Lak**|`lbe`|\n|**Lakota**|`lkt`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lezghian**|`lez`|\n|**Lithuanian**|`lt`|\n|**Lower Sorbian**|`dsb`|\n|**Lule Sami**|`smj`|\n|**Luxembourgish**|`lb`|\n|**Mahasu Pahari**|`bfz`|\n|**Maithili**|`mai`|\n|**Malay (macrolanguage)**|`ms`|\n|**Maltese**|`mt`|\n|**Manx**|`gv`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Marshallese**|`mh`|\n|**Mongolian**|`mn`|\n|**Montenegrin**|`cnr`|\n|**Neapolitan**|`nap`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Newari**|`new`|\n|**Niuean**|`niu`|\n|**Nogai**|`nog`|\n|**Northern Sami**|`se`|\n|**Norwegian**|`no`|\n|**Occitan (post 1500)**|`oc`|\n|**Old English (ca. 
450-1100)**|`ang`|\n|**Ossetian**|`os`|\n|**Pali**|`pi`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Romansh**|`rm`|\n|**Russian**|`ru`|\n|**Sadri**|`sck`|\n|**Samoan**|`sm`|\n|**Sanskrit**|`sa`|\n|**Santali**|`sat`|\n|**Scots**|`sco`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Sherpa**|`xsr`|\n|**Sirmauri**|`srx`|\n|**Skolt Sami**|`sms`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sami**|`sma`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tabassaran**|`tab`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tatar**|`tt`|\n|**Tetum**|`tet`|\n|**Thangmi**|`thf`|\n|**Tonga (Tonga Islands)**|`to`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Tuvinian**|`tyv`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Upper Sorbian**|`hsb`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Volap\u00fck**|`vo`|\n|**Walser**|`wae`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Yucateco**|`yua`|\n|**Zhuang**|`za`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Arabic (Pseudo-Accents)**|`ar-XA`|\n|**Belarusian**|`be-cyrl`|\n|**Belarusian (Latin)**|`be-latn`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (United States)**|`en-US`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (France)**|`fr-FR`|\n|**German (Germany)**|`de-DE`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Italian (Italy)**|`it-IT`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Kara-Kalpak (Cyrillic)**|`kaa-Cyrl`|\n|**Kazakh**|`kk-cyrl`|\n|**Kazakh (Latin)**|`kk-latn`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Kurdish (Arabic)**|`ku-arab`|\n|**Kurdish (Latin)**|`ku-latn`|\n|**Polish**|`pl-PO`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Region: Czechia**|`cz-CZ`|\n|**Region: Greece**|`gr-GR`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Cyrillic, Montenegro)**|`sr-Cyrl-ME`|\n|**Serbian (Latin)**|`sr-latn`|\n|**Serbian (Latin, Montenegro)**|`sr-Latn-ME`|\n|**Spanish (Spain)**|`es-ES`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Uzbek (Arabic)**|`uz-arab`|\n|**Uzbek (Cyrillic)**|`uz-cyrl`|\n\n
", "summary": "OCR", "tags": ["Ocr"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrocrOcrRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrocrOcrRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrocrResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/ocr_async/": {"get": {"operationId": "ocr_ocr_ocr_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Ocr Async List Job", "tags": ["Ocr Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_ocr_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|1.5 (per 1000 page)|1 page\n|**microsoft**|`rest API 4.0 (2024-02-29-preview)`|10.0 (per 1000 page)|1 page\n|**mistral**|`v1`|1.0 (per 1000 page)|1 page\n\n\n
\n\n", "summary": "Ocr Async Launch Job", "tags": ["Ocr Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncOcrRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncOcrRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_ocr_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Ocr Async delete Jobs", "tags": ["Ocr Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/ocr_async/{public_id}/": {"get": {"operationId": "ocr_ocr_ocr_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Ocr Async Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Ocr Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocrocr_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": 
{"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/ocr_tables_async/": {"get": {"operationId": "ocr_ocr_ocr_tables_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "OCR Tables List Job", "tags": ["Ocr Tables Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_ocr_tables_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|15.0 (per 1000 page)|1 page\n|**google**|`DocumentAI v1 beta3`|65.0 (per 1000 page)|1 page\n|**microsoft**|`rest API 4.0 (2024-02-29-preview)`|10.0 (per 1000 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Angika**|`anp`|\n|**Arabic**|`ar`|\n|**Asturian**|`ast`|\n|**Awadhi**|`awa`|\n|**Azerbaijani**|`az`|\n|**Bagheli**|`bfy`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bhojpuri**|`bho`|\n|**Bislama**|`bi`|\n|**Bodo (India)**|`brx`|\n|**Bosnian**|`bs`|\n|**Braj**|`bra`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Bundeli**|`bns`|\n|**Buriat**|`bua`|\n|**Camling**|`rab`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chamorro**|`ch`|\n|**Chhattisgarhi**|`hne`|\n|**Chinese**|`zh`|\n|**Cornish**|`kw`|\n|**Corsican**|`co`|\n|**Crimean Tatar**|`crh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dari**|`prs`|\n|**Dhimal**|`dhi`|\n|**Dogri (macrolanguage)**|`doi`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Erzya**|`myv`|\n|**Estonian**|`et`|\n|**Faroese**|`fo`|\n|**Fijian**|`fj`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Friulian**|`fur`|\n|**Gagauz**|`gag`|\n|**Galician**|`gl`|\n|**German**|`de`|\n|**Gilbertese**|`gil`|\n|**Gondi**|`gon`|\n|**Gurung**|`gvr`|\n|**Haitian**|`ht`|\n|**Halbi**|`hlb`|\n|**Hani**|`hni`|\n|**Haryanvi**|`bgc`|\n|**Hawaiian**|`haw`|\n|**Hindi**|`hi`|\n|**Hmong Daw**|`mww`|\n|**Ho**|`hoc`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Inari Sami**|`smn`|\n|**Indonesian**|`id`|\n|**Interlingua (International Auxiliary Language Association)**|`ia`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Jaunsari**|`jns`|\n|**Javanese**|`jv`|\n|**K'iche'**|`quc`|\n|**Kabuverdianu**|`kea`|\n|**Kachin**|`kac`|\n|**Kalaallisut**|`kl`|\n|**Kangri**|`xnr`|\n|**Kara-Kalpak**|`kaa`|\n|**Karachay-Balkar**|`krc`|\n|**Kashubian**|`csb`|\n|**Kazakh**|`kk`|\n|**Khaling**|`klr`|\n|**Khasi**|`kha`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Korku**|`kfq`|\n|**Koryak**|`kpy`|\n|**Kosraean**|`kos`|\n|**Kumarbhag 
Paharia**|`kmj`|\n|**Kumyk**|`kum`|\n|**Kurdish**|`ku`|\n|**Kurukh**|`kru`|\n|**K\u00f6lsch**|`ksh`|\n|**Lakota**|`lkt`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Lower Sorbian**|`dsb`|\n|**Lule Sami**|`smj`|\n|**Luxembourgish**|`lb`|\n|**Mahasu Pahari**|`bfz`|\n|**Malay (macrolanguage)**|`ms`|\n|**Maltese**|`mt`|\n|**Manx**|`gv`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Mongolian**|`mn`|\n|**Montenegrin**|`cnr`|\n|**Neapolitan**|`nap`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Niuean**|`niu`|\n|**Nogai**|`nog`|\n|**Northern Sami**|`se`|\n|**Norwegian**|`no`|\n|**Occitan (post 1500)**|`oc`|\n|**Ossetian**|`os`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Romansh**|`rm`|\n|**Russian**|`ru`|\n|**Sadri**|`sck`|\n|**Samoan**|`sm`|\n|**Sanskrit**|`sa`|\n|**Santali**|`sat`|\n|**Scots**|`sco`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Sherpa**|`xsr`|\n|**Sirmauri**|`srx`|\n|**Skolt Sami**|`sms`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sami**|`sma`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tatar**|`tt`|\n|**Tetum**|`tet`|\n|**Thangmi**|`thf`|\n|**Tonga (Tonga Islands)**|`to`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Tuvinian**|`tyv`|\n|**Uighur**|`ug`|\n|**Upper Sorbian**|`hsb`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Volap\u00fck**|`vo`|\n|**Walser**|`wae`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Yucateco**|`yua`|\n|**Zhuang**|`za`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Belarusian**|`be-Cyrl`|\n|**Belarusian (Latin)**|`be-Latn`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Kara-Kalpak (Cyrillic)**|`kaa-Cyrl`|\n|**Kazakh**|`kk-Cyrl`|\n|**Kazakh (Latin)**|`kk-Latn`|\n|**Kurdish (Arabic)**|`ku-Arab`|\n|**Kurdish (Latin)**|`ku-Latn`|\n|**Serbian (Cyrillic)**|`sr-Cyrl`|\n|**Serbian (Cyrillic, Montenegro)**|`sr-Cyrl-ME`|\n|**Serbian (Latin)**|`sr-Latn`|\n|**Serbian (Latin, Montenegro)**|`sr-Latn-ME`|\n|**Uzbek (Arabic)**|`uz-Arab`|\n|**Uzbek (Cyrillic)**|`uz-cyrl`|\n\n
", "summary": "OCR Tables Launch Job", "tags": ["Ocr Tables Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/OcrTablesAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/OcrTablesAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_ocr_tables_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "OCR Tables delete Jobs", "tags": ["Ocr Tables Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/ocr_tables_async/{public_id}/": {"get": {"operationId": "ocr_ocr_ocr_tables_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "OCR Tables Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Ocr Tables Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocrocr_tables_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/resume_parser/": {"post": {"operationId": "ocr_ocr_resume_parser_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**affinda**|-|`v3`|0.07 (per 1 file)|1 file\n|**klippa**|-|`v1`|0.1 (per 1 file)|1 file\n|**senseloaf**|-|`v3`|0.045 (per 1 file)|1 file\n|**extracta**|-|`v1`|0.1 (per 1 page)|1 page\n|**openai**|-|`v1.0`|0.04 (per 1 page)|1 page\n|**openai**|**gpt-4o**|`v1.0`|0.04 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-cn`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Taiwan)**|`zh-tw`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Resume Parser", "tags": ["Resume Parser"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrresume_parserResumeParserRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrresume_parserResumeParserRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrresume_parserResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/prompts/": {"get": {"operationId": "prompts_prompts_list", "summary": "List Prompts", "parameters": [{"name": "page", "required": false, "in": "query", "description": "A page number within the paginated result set.", "schema": {"type": "integer"}}, {"name": "page_size", "required": false, "in": "query", "description": "Number of results to return per page.", "schema": {"type": "integer"}}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedPromptCreateList"}}}, "description": ""}}}, "post": {"operationId": "prompts_prompts_create", "summary": "Create Prompts", "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptCreateRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptCreateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptCreateRequest"}}}, "required": 
true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptCreate"}}}, "description": ""}}}}, "/prompts/{name}/": {"get": {"operationId": "prompts_prompts_retrieve", "summary": "Get Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "post": {"operationId": "prompts_prompts_create_2", "description": "Call prompt with your variables in prompt_context. You can override params saved in the prompt and specify file URLs", "summary": "Call prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptCallRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptCallRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptCallRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "put": {"operationId": "prompts_prompts_update", "summary": "Update Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdateRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptUpdateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": 
{"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "patch": {"operationId": "prompts_prompts_partial_update", "summary": "Update Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedPromptUpdateRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedPromptUpdateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedPromptUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "delete": {"operationId": "prompts_prompts_destroy", "summary": "Delete Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/prompts/{name}/history/": {"get": {"operationId": "prompts_prompts_history_list", "summary": "List Prompt History", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"name": "page", "required": false, "in": "query", "description": "A page number within the paginated result set.", "schema": {"type": "integer"}}, {"name": "page_size", "required": false, "in": "query", "description": "Number of results to return per page.", "schema": {"type": "integer"}}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedPromptHistoryList"}}}, "description": ""}}}, "post": {"operationId": "prompts_prompts_history_create", "summary": "Create Prompt History", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": 
["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}}, "/prompts/{name}/history/{id}/": {"get": {"operationId": "prompts_prompts_history_retrieve", "summary": "Get Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}, "put": {"operationId": "prompts_prompts_history_update", "summary": "Update Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}, "patch": {"operationId": "prompts_prompts_history_partial_update", "summary": "Update Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", 
"name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedPromptHistoryRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedPromptHistoryRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedPromptHistoryRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}, "delete": {"operationId": "prompts_prompts_history_destroy", "summary": "Delete Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/prompts/{name}/history/{id}/template-variables/": {"get": {"operationId": "prompts_prompts_history_template_variables_retrieve", "summary": "Get Prompt Template Variables", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"description": "No response body"}}}}, "/resources/": {"get": {"operationId": "resources_resources_list", "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/ResourceList"}}}}, "description": ""}}}, "post": {"operationId": "resources_resources_create", "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceCreateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], 
"responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceCreate"}}}, "description": ""}}}}, "/resources/{resource}/": {"get": {"operationId": "resources_resources_retrieve", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdate"}}}, "description": ""}}}, "put": {"operationId": "resources_resources_update", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdate"}}}, "description": ""}}}, "patch": {"operationId": "resources_resources_partial_update", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedResourceUpdateRequest"}}}}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdate"}}}, "description": ""}}}, "delete": {"operationId": "resources_resources_destroy", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/resources/{resource}/asset/": {"post": {"operationId": "resources_resources_asset_create", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/AssetCreateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetCreate"}}}, "description": ""}}}}, "/resources/{resource}/asset/{asset}/": {"get": {"operationId": "resources_resources_asset_retrieve", "parameters": [{"in": "path", "name": "asset", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdate"}}}, "description": ""}}}, "put": {"operationId": "resources_resources_asset_update", "parameters": [{"in": "path", "name": "asset", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdate"}}}, "description": ""}}}, "patch": {"operationId": "resources_resources_asset_partial_update", "parameters": [{"in": "path", "name": "asset", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedAssetUpdateRequest"}}}}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdate"}}}, "description": ""}}}, "delete": {"operationId": "resources_resources_asset_destroy", "parameters": [{"in": "path", "name": "asset", "schema": {"type": 
"string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/text/ai_detection/": {"post": {"operationId": "text_text_ai_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sapling**|`v1`|5.0 (per 1000000 char)|1000 char\n|**winstonai**|`v2`|14.0 (per 1000000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Chinese**|`zh`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Spanish**|`es`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n\n
", "summary": "AI Content Detection", "tags": ["Ai Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textai_detectionAiDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textai_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/anonymization/": {"post": {"operationId": "text_text_anonymization_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|-|`v3.0.0`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 second)|1 second\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**microsoft**|-|`v3.1`|0.25 (per 1000000 char)|1000 char\n|**privateai**|-|`v3`|5.0 (per 1000000 char)|100 char\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 
token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Arabic**|`ar`|\n|**Bambara**|`bm`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Malay (macrolanguage)**|`ms`|\n|**Modern Greek (1453-)**|`el`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Vietnamese**|`vi`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n
Supported Models\n\n
\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Anonymization", "tags": ["Anonymization"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textanonymizationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/chat/": {"post": {"operationId": "text_text_chat_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|**o3-mini**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|-|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**o1-preview**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v1Beta`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v1Beta`|1.4e-05 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v1Beta`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v1Beta`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v1Beta`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v1Beta`|6e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4-1106-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v1Beta`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v1Beta`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v1Beta`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v1Beta`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v1Beta`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v1Beta`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v1Beta`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v1Beta`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v1Beta`|2.4e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4o-mini-search-preview**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v1Beta`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v1Beta`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v1Beta`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v1Beta`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v1Beta`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v1Beta`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o4-mini**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v1Beta`|0.0 (per 1 second)|1 second\n|**google**|**gemini-1.5-flash-8b-latest**|`v1`|0.3 (per 1000000 token)|1 token\n|**google**|-|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 
token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 
1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|-|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-light**|`2022-12-06`|0.6 (per 1000000 token)|1 token\n|**cohere**|**command-nightly**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-light-nightly**|`2022-12-06`|0.6 (per 1000000 token)|1 token\n|**cohere**|**command-r**|`2022-12-06`|1.5 (per 1000000 token)|1 token\n|**cohere**|**command-r7b-12-2024**|`2022-12-06`|0.15 (per 1000000 token)|1 token\n|**meta**|**llama3-1-405b-instruct-v1:0**|`boto3 (v1.35.84)`|2.4 (per 1000000 token)|1 token\n|**meta**|**llama3-1-70b-instruct-v1:0**|`boto3 (v1.35.84)`|0.72 (per 1000000 token)|1 token\n|**meta**|**llama3-1-8b-instruct-v1:0**|`boto3 (v1.35.84)`|0.22 (per 1000000 token)|1 token\n|**meta**|-|`boto3 (v1.35.84)`|0.15 (per 1000000 token)|1 token\n|**mistral**|-|`v0.0.1`|6.0 (per 1000000 token)|1 token\n|**mistral**|**pixtral-large-latest**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-saba-latest**|`v0.0.1`|0.6 (per 1000000 token)|1 token\n|**mistral**|**mistral-small-latest**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**mistral-large-latest**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**codestral-latest**|`v0.0.1`|3e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2405**|`v0.0.1`|3e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2508**|`v0.0.1`|9e-07 (per 1 token)|1 token\n|**mistral**|**codestral-mamba-latest**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**devstral-medium-2507**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**devstral-small-2505**|`v0.0.1`|3e-07 (per 1 token)|1 
token\n|**mistral**|**devstral-small-2507**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**labs-devstral-small-2512**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**devstral-2512**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-2506**|`v0.0.1`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-2509**|`v0.0.1`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-latest**|`v0.0.1`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-2506**|`v0.0.1`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-latest**|`v0.0.1`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2402**|`v0.0.1`|1.2e-05 (per 1 token)|1 token\n|**mistral**|**mistral-large-2407**|`v0.0.1`|9e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2411**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-3**|`v0.0.1`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium**|`v0.0.1`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2312**|`v0.0.1`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2505**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-latest**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**mistral-small**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**mistral-tiny**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-codestral-mamba**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-7b**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo-2407**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x22b**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x7b**|`v0.0.1`|7e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-12b-2409**|`v0.0.1`|1.5e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-large-2411**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**perplexityai**|-|`v1.0`|2.0 (per 1000000 
token)|1 token\n|**perplexityai**|**sonar-pro**|`v1.0`|30.0 (per 1000000 token)|1 token\n|**perplexityai**|**sonar**|`v1.0`|2.0 (per 1000000 token)|1 token\n|**anthropic**|-|`bedrock-2023-05-31`|15.0 (per 1000000 token)|1 token\n|**anthropic**|**claude-3-5-haiku-20241022-v1:0**|`bedrock-2023-05-31`|4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20240620-v1:0**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20241022-v2:0**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20240620-v1:0**|`bedrock-2023-05-31`|1.8e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-haiku-20240307-v1:0**|`bedrock-2023-05-31`|1.25e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-opus-20240229-v1:0**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-sonnet-20240229-v1:0**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-instant-v1**|`bedrock-2023-05-31`|2.4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-v1**|`bedrock-2023-05-31`|2.4e-05 (per 1 token)|1 token\n|**anthropic**|**claude-v2:1**|`bedrock-2023-05-31`|2.4e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-20241022**|`bedrock-2023-05-31`|4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-latest**|`bedrock-2023-05-31`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5-20251001**|`bedrock-2023-05-31`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5**|`bedrock-2023-05-31`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20240620**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20241022**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-latest**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20250219**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 
token\n|**anthropic**|**claude-3-7-sonnet-latest**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-haiku-20240307**|`bedrock-2023-05-31`|1.25e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-opus-20240229**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-opus-latest**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-4-opus-20250514**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-4-sonnet-20250514**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5-20250929**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1-20250805**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-20250514**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5-20251101**|`bedrock-2023-05-31`|2.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5**|`bedrock-2023-05-31`|2.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-20250514**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**xai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 
token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**deepseek**|**deepseek-v3**|`v1`|1.1e-06 (per 1 token)|1 token\n|**deepseek**|-|`v1`|1.25 (per 1000000 token)|1 token\n|**deepseek**|**deepseek-reasoner**|`v1`|2.19e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-chat**|`v1`|1.1e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-coder**|`v1`|2.8e-07 (per 1 token)|1 token\n|**deepseek**|**deepseek-r1**|`v1`|2.19e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-v3.2**|`v1`|4e-07 (per 1 token)|1 token\n|**amazon**|-|`boto3 (v1.29.6)`|0.24 (per 1000000 
token)|1 token\n|**amazon**|**amazon.nova-lite-v1:0**|`boto3 (v1.29.6)`|0.24 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-micro-v1:0**|`boto3 (v1.29.6)`|0.14 (per 1000000 token)|1 token\n|**together_ai**|-|`v1`|1.2 (per 1000000 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-72B-Instruct-Turbo**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`v1`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-21.1b-41b**|`v1`|8e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-4.1b-8b**|`v1`|2e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-41.1b-80b**|`v1`|9e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-8.1b-21b**|`v1`|3e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-81.1b-110b**|`v1`|1.8e-06 (per 1 token)|1 token\n|**together_ai**|**together-ai-up-to-4b**|`v1`|1e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-7B-Instruct-Turbo**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Instruct-2507-tput**|`v1`|6e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Thinking-2507**|`v1`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-fp8-tput**|`v1`|6e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8**|`v1`|2e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1**|`v1`|7e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1-0528-tput**|`v1`|2.19e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3**|`v1`|1.25e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3.1**|`v1`|1.7e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-3.2-3B-Instruct-Turbo**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo-Free**|`v1`|0.0 (per 1 token)|1 
token\n|**together_ai**|**meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8**|`v1`|8.5e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-4-Scout-17B-16E-Instruct**|`v1`|5.9e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo**|`v1`|3.5e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`v1`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`v1`|1.8e-07 (per 1 token)|1 token\n|**together_ai**|**mistralai/Mistral-7B-Instruct-v0.1**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**mistralai/Mistral-Small-24B-Instruct-2501**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`v1`|6e-07 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct**|`v1`|3e-06 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-120b**|`v1`|6e-07 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-20b**|`v1`|2e-07 (per 1 token)|1 token\n|**together_ai**|**togethercomputer/CodeLlama-34b-Instruct**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**zai-org/GLM-4.5-Air-FP8**|`v1`|1.1e-06 (per 1 token)|1 token\n|**together_ai**|**zai-org/GLM-4.6**|`v1`|2.2e-06 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct-0905**|`v1`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Instruct**|`v1`|1.5e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Thinking**|`v1`|1.5e-06 (per 1 token)|1 token\n|**microsoft**|**gpt-4o**|`Azure AI Foundry`|15.0 (per 1000000 token)|1 token\n|**groq**|**llama3-70b-8192**|`v1`|0.59 (per 1000000 token)|1 token\n|**groq**|**llama-3.1-8b-instant**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**llama-3.3-70b-versatile**|`v1`|7.9e-07 (per 1 token)|1 token\n|**groq**|**gemma-7b-it**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-guard-4-12b**|`v1`|2e-07 (per 1 token)|1 
token\n|**groq**|**meta-llama/llama-4-maverick-17b-128e-instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-4-scout-17b-16e-instruct**|`v1`|3.4e-07 (per 1 token)|1 token\n|**groq**|**moonshotai/kimi-k2-instruct-0905**|`v1`|3e-06 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-120b**|`v1`|7.5e-07 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-20b**|`v1`|5e-07 (per 1 token)|1 token\n|**groq**|**qwen/qwen3-32b**|`v1`|5.9e-07 (per 1 token)|1 token\n\n\n
\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**google**|`gemini-2.5-flash`|\n|**cohere**|`command`|\n|**meta**|`llama3-1-8b-instruct-v1:0`|\n|**mistral**|`mistral-large-latest`|\n|**perplexityai**|`sonar`|\n|**anthropic**|`claude-3-5-sonnet-latest`|\n|**xai**|`grok-2-latest`|\n|**deepseek**|`deepseek-chat`|\n|**amazon**|`amazon.nova-lite-v1:0`|\n|**together_ai**|`Qwen/Qwen2.5-72B-Instruct-Turbo`|\n|**microsoft**|`gpt-4o`|\n|**groq**|`llama3-70b-8192`|\n\n
", "summary": "Chat", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textchatChatRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textchatResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/chat/stream/": {"post": {"operationId": "text_text_chat_stream_create", "description": "Streamed version of Chat feature, the raw text will be streamed chunk by chunk.\n\nNOTE: For this feature, you an only request one provider at a time.", "summary": "Chat Stream", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textchatChatStreamRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"text/plain": {"schema": {"type": "string"}}}, "description": ""}}}}, "/text/code_generation/": {"post": {"operationId": "text_text_code_generation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v1`|1.5e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v1`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v1`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v1`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v1`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v1`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v1`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v1`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v1`|3e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4-vision-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v1`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v1`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v1`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v1`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v1`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v1`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v1`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v1`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v1`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v1`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v1`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v1`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v1`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v1`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v1`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v1`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v1`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**google**|-|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-latest**|`v1`|0.3 (per 1000000 
token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 
token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n|**xai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 
token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 
token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**google**|`gemini-1.5-flash`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Code Generation", "tags": ["Code Generation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textcode_generationCodeGenerationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textcode_generationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/embeddings/": {"post": {"operationId": "text_text_embeddings_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|-|`v3.0.0`|0.1 (per 1000000 token)|1 token\n|**openai**|**1536__text-embedding-ada-002**|`v3.0.0`|0.1 (per 1000000 token)|1 token\n|**openai**|**text-embedding-3-large**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-embedding-3-small**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-embedding-ada-002**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-embedding-ada-002-v2**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**google**|**gemini-embedding-001**|`v1`|1.5e-07 (per 1 token)|1 token\n|**cohere**|**embed-english-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-english-light-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-multilingual-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-english-v2.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-english-light-v2.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-multilingual-v2.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-v4.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-multilingual-light-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**mistral**|**1024__mistral-embed**|`v0.0.1`|0.1 (per 1000000 token)|1 token\n|**mistral**|-|`v0.0.1`|0.1 (per 1000000 token)|1 token\n|**mistral**|**mistral/mistral-embed**|`v0.0.1`|0.1 (per 1000000 seconde)|1 seconde\n|**mistral**|**mistral-embed**|`v0.0.1`|0.0 (per 1 seconde)|1 seconde\n|**mistral**|**codestral-embed**|`v0.0.1`|0.0 (per 1 seconde)|1 seconde\n|**mistral**|**codestral-embed-2505**|`v0.0.1`|0.0 (per 1 seconde)|1 seconde\n|**jina**|-|`v1`|0.018 (per 1000000 token)|1 token\n|**jina**|**jina-embeddings-v2-base-en**|`v1`|0.018 (per 1000000 token)|1 token\n|**jina**|**jina-embeddings-v3**|`v1`|0.02 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`1536__text-embedding-ada-002`|\n|**google**|`text-multilingual-embedding-002`|\n|**cohere**|`4096__embed-english-v2.0`|\n|**mistral**|`1024__mistral-embed`|\n|**jina**|`jina-embeddings-v3`|\n\n
", "summary": "Embeddings", "tags": ["Embeddings"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textembeddingsEmbeddingsRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textembeddingsResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/emotion_detection/": {"post": {"operationId": "text_text_emotion_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**vernai**|`v1`|2.0 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Emotion Detection", "tags": ["Emotion Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textemotion_detectionEmotionDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textemotion_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/entity_sentiment/": {"post": {"operationId": "text_text_entity_sentiment_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 1.26.8`|1.0 (per 1000000 char)|300 char\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 
token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 
token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**Japanese**|`ja`|\n|**Spanish**|`es`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-1.5-flash`|\n\n
", "summary": "Entity Sentiment", "tags": ["Entity Sentiment"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textentity_sentimentEntitySentimentRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textentity_sentimentResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/keyword_extraction/": {"post": {"operationId": "text_text_keyword_extraction_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 second)|1 second\n|**tenstorrent**|-|`v1.0.0`|0.7 (per 1000000 char)|1000 char\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07
(per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Arabic**|`ar`|\n|**Bulgarian**|`bg`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Modern Greek (1453-)**|`el`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n|**Turkish**|`tr`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Keyword Extraction", "tags": ["Keyword Extraction"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textkeyword_extractionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/moderation/": {"post": {"operationId": "text_text_moderation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**microsoft**|-|`v1.0`|1.0 (per 1000 request)|1 request\n|**openai**|-|`v3.0.0`|free|-\n|**openai**|**text-moderation-stable**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-moderation-007**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-moderation-latest**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**google**|-|`v1`|5.0 (per 1000000 char)|100 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Assamese**|`as`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Catalan**|`ca`|\n|**Central Kurdish**|`ckb`|\n|**Cherokee**|`chr`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Fulah**|`ff`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Hausa**|`ha`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Konkani (macrolanguage)**|`kok`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Norwegian Nynorsk**|`nn`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Pedi**|`nso`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Quechua**|`qu`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Tigrinya**|`ti`|\n|**Tswana**|`tn`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wolof**|`wo`|\n|**Xhosa**|`xh`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n\n

Supported Models

\n\n", "summary": "Moderation", "tags": ["Moderation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textmoderationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/named_entity_recognition/": {"post": {"operationId": "text_text_named_entity_recognition_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|10.0 (per 1000000 token)|1 token\n|**tenstorrent**|-|`v1.0.0`|1.0 (per 1000000 char)|1000 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Arabic**|`ar`|\n|**Chinese**|`zh`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Russian**|`ru`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n|**Turkish**|`tr`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Named Entity Recognition", "tags": ["Named Entity Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textnamed_entity_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/plagia_detection/": {"post": {"operationId": "text_text_plagia_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**winstonai**|`v2`|14.0 (per 1000000 char)|1 char\n\n\n
\n\n", "summary": "Plagia Detection", "tags": ["Plagia Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textplagia_detectionPlagiaDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textplagia_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/prompt_optimization/": {"post": {"operationId": "text_text_prompt_optimization_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 second)|1 second\n\n\n
\n\n

Supported Models

\n\n", "summary": "Prompt Optimization", "tags": ["Prompt Optimization"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textprompt_optimizationPromptOptimizationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textprompt_optimizationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/sentiment_analysis/": {"post": {"operationId": "text_text_sentiment_analysis_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 
token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 
token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|20.0 (per 1000000 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 second)|1 second\n|**tenstorrent**|-|`v1.1.0`|0.7 (per 1000000 char)|1000 char\n|**sapling**|-|`v1`|20.0 (per 1000000 char)|1000 char\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 
token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Arabic**|`ar`|\n|**Chinese**|`zh`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hindi**|`hi`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Modern Greek (1453-)**|`el`|\n|**Norwegian**|`no`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Russian**|`ru`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Vietnamese**|`vi`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-1.5-flash`|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Sentiment Analysis", "tags": ["Sentiment Analysis"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textsentiment_analysisResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/spell_check/": {"post": {"operationId": "text_text_spell_check_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**prowritingaid**|`v2`|10.0 (per 1000 request)|1 request\n|**sapling**|`v1`|2.0 (per 1000000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n|**jp**|`jp`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Simplified)**|`zh-hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-hant`|\n|**English (United Kingdom)**|`en-gb`|\n|**Portuguese (Brazil)**|`pt-br`|\n|**Portuguese (Portugal)**|`pt-pt`|\n\n
", "summary": "Spell Check", "tags": ["Spell Check"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textspell_checkSpellCheckRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textspell_checkResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/summarize/": {"post": {"operationId": "text_text_summarize_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**microsoft**|-|`v3.1`|2.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|60.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 second)|1 second\n|**cohere**|-|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**summarize-xlarge**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-nightly**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v3.0.0`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v3.0.0`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v3.0.0`|1.5e-05 (per 1 
token\n|**xai**|**grok-4-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Bulgarian**|`bg`|\n|**Chinese**|`zh`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hungarian**|`hu`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Modern Greek (1453-)**|`el`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Brazil)**|`pt-br`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Portuguese (Portugal)**|`pt-pt`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4`|\n|**cohere**|`summarize-xlarge`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Summarize", "tags": ["Summarize"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textsummarizeSummarizeRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textsummarizeResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/topic_extraction/": {"post": {"operationId": "text_text_topic_extraction_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|-|`v1`|0.6 (per 1000000 char)|1 char\n|**openai**|**gpt-4o**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**tenstorrent**|-|`v1.0.0`|2.0 (per 1000000 char)|1000 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Topic Extraction", "tags": ["Topic Extraction"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/translation/automatic_translation/": {"post": {"operationId": "translation_translation_automatic_translation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|15.0 (per 1000000 char)|1 char\n|**google**|-|`v3`|20.0 (per 1000000 char)|1 char\n|**microsoft**|-|`v3.0`|10.0 (per 1000000 char)|1 char\n|**deepl**|-|`v2`|20.0 (per 1000000 char)|1 char\n|**modernmt**|-|`1.2.8`|8.0 (per 1000000 char)|1 char\n|**openai**|-|`v1`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1`|20.0 (per 1000000 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Assamese**|`as`|\n|**Azerbaijani**|`az`|\n|**Bashkir**|`ba`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dari**|`prs`|\n|**Dhivehi**|`dv`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Faroese**|`fo`|\n|**Fijian**|`fj`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hmong Daw**|`mww`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Inuinnaqtun**|`ikt`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Klingon**|`tlh`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Literary Chinese**|`lzh`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Northern Kurdish**|`kmr`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Norwegian Nynorsk**|`nn`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Quer\u00e9taro 
Otomi**|`otq`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tahitian**|`ty`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Tibetan**|`bo`|\n|**Tigrinya**|`ti`|\n|**Tonga (Tonga Islands)**|`to`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Upper Sorbian**|`hsb`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Yucateco**|`yua`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**French (Canada)**|`fr-CA`|\n|**Inuktitut (Latin)**|`iu-Latn`|\n|**Klingon (Klingon (KLI pIqaD))**|`tlh-Piqd`|\n|**Klingon (Latin)**|`tlh-Latn`|\n|**Mongolian (Cyrillic)**|`mn-Cyrl`|\n|**Mongolian (Mongolian)**|`mn-Mong`|\n|**Persian (Afghanistan)**|`fa-AF`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Serbian (Cyrillic)**|`sr-Cyrl`|\n|**Serbian (Latin)**|`sr-Latn`|\n|**Spanish (Latin America)**|`es-419`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Spain)**|`es-ES`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Automatic Translation", "tags": ["Automatic Translation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationautomatic_translationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/translation/document_translation/": {"post": {"operationId": "translation_translation_document_translation_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**deepl**|`v2`|2.0 (per 20 page)|20 page\n|**google**|`v3`|0.08 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Taiwan)**|`zh-TW`|\n\n
", "summary": "Document Translation", "tags": ["Document Translation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationdocument_translationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/translation/language_detection/": {"post": {"operationId": "translation_translation_language_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**google**|-|`v1`|20.0 (per 1000000 char)|1 char\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**modernmt**|-|`1.1.0`|8.0 (per 1000000 char)|1 char\n|**openai**|-|`v1`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2**|`v1`|10.0 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Language Detection", "tags": ["Language Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationlanguage_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/user/custom_token/": {"get": {"operationId": "user_user_custom_token_list", "summary": "List Tokens", "tags": ["User Management"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/CustomTokensList"}}}}, "description": ""}}}, "post": {"operationId": "user_user_custom_token_create", "summary": "Create new Token", "tags": ["User Management"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokensCreateRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokensCreate"}}}, "description": ""}}}}, "/user/custom_token/{name}/": {"get": {"operationId": "user_user_custom_token_retrieve", "summary": "Retrieve Token", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["User Management"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokensList"}}}, "description": ""}}}, "patch": {"operationId": "user_user_custom_token_partial_update", "summary": "Update Token", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["User Management"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedCustomTokenUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokenUpdate"}}}, "description": ""}}}, "delete": {"operationId": "user_user_custom_token_destroy", "summary": "Delete Token", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["User Management"], "security": [{"FeatureApiAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/video/deepfake_detection_async/": {"get": {"operationId": "video_video_deepfake_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Deepfake Detection List Jobs", "tags": ["Deepfake Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_deepfake_detection_async_create", "description": "No Provider Available\n\n\n\n", "summary": "Deepfake Detection Launch Job", "tags": ["Deepfake Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_deepfake_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Deepfake Detection delete Jobs", "tags": ["Deepfake Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/deepfake_detection_async/{public_id}/": {"get": {"operationId": "video_video_deepfake_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Deepfake Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": 
"boolean", "default": false}}], "tags": ["Deepfake Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideodeepfake_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/explicit_content_detection_async/": {"get": {"operationId": "video_video_explicit_content_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Explicit Content Detection List Jobs", "tags": ["Explicit Content Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_explicit_content_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 seconds)|60 seconds\n|**google**|`Video intelligence v1`|0.1 (per 60 seconds)|1 second\n\n\n
\n\n", "summary": "Video Explicit Content Detection Launch Job", "tags": ["Explicit Content Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_explicit_content_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Explicit Content delete Jobs", "tags": ["Explicit Content Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/explicit_content_detection_async/{public_id}/": {"get": {"operationId": "video_video_explicit_content_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Explicit Content Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Explicit Content Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoexplicit_content_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, 
"description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/face_detection_async/": {"get": {"operationId": "video_video_face_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Face Detection List Jobs", "tags": ["Face Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_face_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 seconds)|60 seconds\n|**google**|`Video intelligence v1`|0.1 (per 60 seconds)|1 second\n\n\n
\n\n", "summary": "Face Detection Launch Job", "tags": ["Face Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_face_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Face Detection delete Jobs", "tags": ["Face Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/face_detection_async/{public_id}/": {"get": {"operationId": "video_video_face_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Face Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Face Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoface_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, 
"403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/generation_async/": {"get": {"operationId": "video_video_generation_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Generation List Jobs", "tags": ["Generation Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_generation_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|**amazon.nova-reel-v1:1**|`boto3`|0.08 (per 1 second)|1 second\n|**amazon**|**amazon.nova-reel-v1:0**|`boto3`|0.08 (per 1 second)|1 second\n|**amazon**|-|`boto3`|0.5 (per 1 request)|1 request\n|**minimax**|**MiniMax-Hailuo-2.3**|`v1`|0.28 (per 1 request)|1 request\n|**minimax**|**MiniMax-Hailuo-02**|`v1`|0.28 (per 1 request)|1 request\n|**minimax**|-|`v1`|0.56 (per 1 request)|1 request\n|**minimax**|**S2V-01**|`v1`|0.65 (per 1 request)|1 request\n|**minimax**|**T2V/I2V-01-Director**|`v1`|0.43 (per 1 request)|1 request\n|**bytedance**|**seedance-1-0-pro-250528**|`v3`|0.62 (per 1 request)|1 request\n|**bytedance**|-|`v3`|1.8 (per 1000000 token)|1 token\n|**bytedance**|**seedance-1-0-lite-t2v-250428**|`v3`|1.8 (per 1000000 token)|1 token\n|**google**|**veo-3.1-generate-preview**|`v1Beta`|0.4 (per 1 second)|1 second\n|**google**|**veo-3.0-generate-001**|`v1Beta`|0.4 (per 1 second)|1 second\n|**google**|**veo-3.0-fast-generate-001**|`v1Beta`|0.15 (per 1 second)|1 second\n|**google**|-|`v1Beta`|3.6 (per 1 request)|1 request\n|**openai**|-|`v1`|5.0 (per 1 request)|1 request\n|**openai**|**sora-2**|`v1`|0.1 (per 1 second)|1 second\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**amazon**|`amazon.nova-reel-v1:1`|\n|**minimax**|`MiniMax-Hailuo-2.3`|\n|**bytedance**|`seedance-1-0-pro-250528`|\n|**google**|`veo-3.1-generate-preview`|\n|**openai**|`sora-2`|\n\n
", "summary": "Generation Launch Job", "tags": ["Generation Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/GenerationAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/GenerationAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_generation_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Generation delete Jobs", "tags": ["Generation Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/generation_async/{public_id}/": {"get": {"operationId": "video_video_generation_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Generation Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Generation Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideogeneration_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": 
{"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/label_detection_async/": {"get": {"operationId": "video_video_label_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Label Detection List Jobs", "tags": ["Label Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_label_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 seconds)|60 seconds\n|**google**|`Video intelligence v1`|0.1 (per 60 seconds)|1 second\n\n\n
\n\n", "summary": "Label Detection Launch Job", "tags": ["Label Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_label_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Label Detection delete Jobs", "tags": ["Label Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/label_detection_async/{public_id}/": {"get": {"operationId": "video_video_label_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Label Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Label Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideolabel_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/logo_detection_async/": {"get": {"operationId": "video_video_logo_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Logo Detection List Jobs", "tags": ["Logo Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_logo_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`Video intelligence v1`|0.15 (per 60 seconds)|1 second\n|**twelvelabs**|`v1.1.2`|0.1 (per 60 seconds)|60 seconds\n\n\n
\n\n", "summary": "Video Logo Detection Launch Job", "tags": ["Logo Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_logo_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Logo Detection delete Jobs", "tags": ["Logo Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/logo_detection_async/{public_id}/": {"get": {"operationId": "video_video_logo_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Logo Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Logo Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideologo_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/object_tracking_async/": {"get": {"operationId": "video_video_object_tracking_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Object Tracking List Jobs", "tags": ["Object Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_object_tracking_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`Video intelligence v1`|0.15 (per 60 seconds)|1 second\n\n\n
\n\n", "summary": "Video Object Tracking Launch Job", "tags": ["Object Tracking Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_object_tracking_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Object Tracking delete Jobs", "tags": ["Object Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/object_tracking_async/{public_id}/": {"get": {"operationId": "video_video_object_tracking_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Object Tracking Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Object Tracking Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoobject_tracking_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/person_tracking_async/": {"get": {"operationId": "video_video_person_tracking_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Person Tracking List Jobs", "tags": ["Person Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_person_tracking_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 seconds)|60 seconds\n|**google**|`Video intelligence v1`|0.1 (per 60 seconds)|1 second\n\n\n
\n\n", "summary": "Person Tracking Launch Job", "tags": ["Person Tracking Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_person_tracking_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Person Tracking delete Jobs", "tags": ["Person Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/person_tracking_async/{public_id}/": {"get": {"operationId": "video_video_person_tracking_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Person Tracking Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Person Tracking Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoperson_tracking_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/question_answer/": {"post": {"operationId": "video_video_question_answer_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|**gemini-3-flash-preview**|`v1Beta`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|-|`v1Beta`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview**|`v1Beta`|0.3 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1Beta`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-flash-lite-preview**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1Beta`|0.3 (per 1000000 token)|1 token\n|**amazon**|-|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-2.5-flash`|\n|**amazon**|`amazon.nova-pro-v1:0`|\n\n
", "summary": "Question Answer", "tags": ["Question Answer"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/videoquestion_answerResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/question_answer_async/": {"get": {"operationId": "video_video_question_answer_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Question Answer List Jobs", "tags": ["Question Answer Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_question_answer_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|**gemini-3-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1Beta`|3e-06 (per 1 token)|1 token\n|**google**|-|`v1Beta`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview**|`v1Beta`|0.3 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1Beta`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-flash-lite-preview**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1Beta`|0.3 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-2.5-flash`|\n\n
", "summary": "Question Answer Launch Job", "tags": ["Question Answer Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/QuestionAnswerAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/QuestionAnswerAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_question_answer_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Question Answer delete Jobs", "tags": ["Question Answer Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/question_answer_async/{public_id}/": {"get": {"operationId": "video_video_question_answer_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Question Answer Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Question Answer Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoquestion_answer_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/shot_change_detection_async/": {"get": {"operationId": "video_video_shot_change_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Shot Change Detection List Jobs", "tags": ["Shot Change Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_shot_change_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`Video intelligence v1`|0.05 (per 1 minute)|1 minute\n\n\n
\n\n", "summary": "Shot Change Detection Launch Job", "tags": ["Shot Change Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_shot_change_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Shot Change Detection delete Jobs", "tags": ["Shot Change Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/shot_change_detection_async/{public_id}/": {"get": {"operationId": "video_video_shot_change_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Shot Change Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Shot Change Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoshot_change_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/text_detection_async/": {"get": {"operationId": "video_video_text_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Text Detection List Jobs", "tags": ["Text Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_text_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 seconds)|60 seconds\n|**google**|`Video intelligence v1`|0.15 (per 60 seconds)|1 second\n|**twelvelabs**|`v1.1.2`|0.067 (per 60 seconds)|60 seconds\n\n\n
\n\n", "summary": "Text Detection Launch Job", "tags": ["Text Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_text_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Text Detection delete Jobs", "tags": ["Text Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/text_detection_async/{public_id}/": {"get": {"operationId": "video_video_text_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Text Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Text Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideotext_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, 
"403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/workflow/": {"get": {"operationId": "workflow_workflow_list", "description": "List all created workflows. \n\nNOTE: you can only create a workflow through our app", "summary": "List Workflows", "tags": ["Workflows"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/Workflow"}}}}, "description": ""}}}}, "/workflow/{workflow_id}/": {"get": {"operationId": "workflow_workflow_retrieve", "description": "Get workflow Content (JSON representation)", "summary": "Retrieve a Workflow", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Workflows"], "security": [{"WorkflowShareApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Workflow"}}}, "description": ""}}}, "delete": {"operationId": "workflow_workflow_destroy", "summary": "Delete a Workflow", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Workflows"], "security": [{"WorkflowShareApiAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/workflow/{workflow_id}/execution/": {"get": {"operationId": "workflow_workflow_execution_list", "summary": "List Executions", "parameters": [{"name": "page", "required": false, "in": "query", "description": "A page number within the paginated result set.", "schema": {"type": "integer"}}, {"name": "page_size", "required": false, "in": "query", "description": "Number of results to return per page.", "schema": {"type": "integer"}}, {"in": "path", "name": "workflow_id", "schema": {"type": "string", 
"format": "uuid"}, "required": true}], "tags": ["Executions"], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedExecutionListList"}}}, "description": ""}}}, "post": {"operationId": "workflow_workflow_execution_create", "description": "\nLaunch a workflow Execution.\nif you set `input` in your workflow, you will need to launch the execution with these inputs as parameters\nFor example if you set a file_url as input with name \"my_invoice\", your request body will look like this:\n\n { my_invoice: \"https://example.com/public/invoice_123.pdf\" }\n ", "summary": "Create an Execution", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Executions"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ExecutionFakeCreateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ExecutionFakeCreateRequest"}}}}, "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ExecutionExampleSuccessCreate"}}}, "description": ""}}}}, "/workflow/{workflow_id}/execution/{execution_id}/": {"get": {"operationId": "workflow_workflow_execution_retrieve", "description": "An Execution represents the process of running a workflow with specific input data. It tracks the workflow's progress, providing a unique id, a status (e.g., pending, running, completed), and a result once the execution is finished. 
You can retrieve an Execution by its id to monitor its status or access its result after completion", "summary": "Get an Execution by ID", "parameters": [{"in": "path", "name": "execution_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Executions"], "security": [{"WorkflowShareApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ExecutionExampleSuccessCreate"}}}, "description": ""}}}}, "/workflow/{workflow_id}/webhook/": {"put": {"operationId": "workflow_workflow_webhook_update", "summary": "Update workflow webhook URL", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Webhook"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/WebhookParametersRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/WebhookParametersRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/WebhookParametersRequest"}}}}, "security": [{"WorkflowShareApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/WebhookParameters"}}}, "description": ""}}}, "patch": {"operationId": "workflow_workflow_webhook_partial_update", "summary": "Update workflow webhook URL", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Webhook"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedWebhookParametersRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedWebhookParametersRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedWebhookParametersRequest"}}}}, "security": 
[{"WorkflowShareApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/WebhookParameters"}}}, "description": ""}}}}}, "components": {"schemas": {"AIProject": {"type": "object", "properties": {"project_id": {"type": "string", "format": "uuid", "readOnly": true}, "project_name": {"type": "string", "maxLength": 100}, "project_type": {"$ref": "#/components/schemas/ProjectTypeEnum"}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "user": {"type": "string", "format": "uuid"}}, "required": ["created_at", "project_id", "project_name", "project_type", "user"]}, "AddFileRequest": {"type": "object", "properties": {"data_type": {"$ref": "#/components/schemas/DataTypeEnum"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:pdf|csv|amr|flac|wav|ogg|mp3|mp4|webm|xml)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "metadata": {"type": "string", "minLength": 1, "description": "Optional parameter: Attach metadata to the uploaded file data in your database. Provide a stringified JSON with key-value pairs. Useful in `filter_document` when querying the language model, it allows you to filter data with your Chatbot by considering only documents that have the specified metadata."}, "provider": {"type": "string", "nullable": true, "minLength": 1, "description": "Select a provider to use, only for audio (speech-to-text) & pdf (ocr-async) files."}}, "required": ["data_type"]}, "AddImageRequest": {"type": "object", "properties": {"metadata": {"type": "string", "minLength": 1, "description": "Optional parameter: Attach metadata to the uploaded data in your database. Provide a stringified JSON with key-value pairs. Useful in `filter_document` when querying the language model, it allows you to filter data with your Chatbot by considering only documents that have the specified metadata. Key 'collection_name' is not allowed."}, "model": {"type": "string", "nullable": true, "minLength": 1, "default": "gemini-2.0-flash", "description": "Optional. Select the Language Model provider (e.g., `google/gemini-2.0-flash`, `openai/gpt-4-vision-preview`) to use for describing the uploaded content. This model will analyze the content and generate a text description, which will be stored in the RAG database. If left blank, a default model is used."}, "prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Optional. Specify a custom prompt to guide the Language Model in generating the description for the content. If no prompt is provided, a default prompt will be used to describe what's in the image/video. 
This allows for more specific or targeted analysis."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|png|webp|heic|heif)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}}, "AddTextRequest": {"type": "object", "properties": {"texts": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "LLM Query"}, "metadata": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "default": []}}, "required": ["texts"]}, "AddUrlRequest": {"type": "object", "properties": {"urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1}, "description": "Add multiple urls into the database, it loads all the text from HTML webpages into a document format."}, "js_render": {"type": "array", "items": {"type": "boolean"}, "description": "Enable JavaScript rendering for the provided URLs."}, "metadata": {"type": "array", "items": {"type": "object", "additionalProperties": {}}}}, "required": ["urls"]}, "AddVideoRequest": {"type": "object", "properties": {"metadata": {"type": "string", "minLength": 1, "description": "Optional parameter: Attach metadata to the uploaded data in your database. Provide a stringified JSON with key-value pairs. Useful in `filter_document` when querying the language model, it allows you to filter data with your Chatbot by considering only documents that have the specified metadata. Key 'collection_name' is not allowed."}, "model": {"type": "string", "nullable": true, "minLength": 1, "default": "gemini-2.0-flash", "description": "Optional. Select the Language Model provider (e.g., `google/gemini-2.0-flash`, `openai/gpt-4-vision-preview`) to use for describing the uploaded content. This model will analyze the content and generate a text description, which will be stored in the RAG database. 
If left blank, a default model is used."}, "prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Optional. Specify a custom prompt to guide the Language Model in generating the description for the content. If no prompt is provided, a default prompt will be used to describe what's in the image/video. This allows for more specific or targeted analysis."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mov|mp4|mpeg|avi|x\\-flv|mpg|webm|wmv|3gpp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}}, "AiDetectionItem": {"properties": {"text": {"title": "Text", "type": "string"}, "prediction": {"title": "Prediction", "type": "string"}, "ai_score": {"title": "Ai Score", "type": "integer"}, "ai_score_detail": {"title": "Ai Score Detail", "type": "integer"}}, "required": ["text", "prediction", "ai_score", "ai_score_detail"], "title": "AiDetectionItem", "type": "object"}, "AiProductFile": {"type": "object", "properties": {"file_id": {"type": "string", "format": "uuid", "readOnly": true}, "user": {"type": "string", "readOnly": true}, "project": {"type": "string", "readOnly": true}, "file_type": {"type": "string", "maxLength": 255}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "status": {"nullable": true, "oneOf": [{"$ref": "#/components/schemas/Status889Enum"}, {"$ref": "#/components/schemas/NullEnum"}]}, "error": {"type": "string", "nullable": true}, "task_id": {"type": "string", "nullable": true, "maxLength": 100}, "task_started_at": {"type": "string", "format": "date-time", "nullable": true}, "file_path": {"type": "string", "nullable": true}, "file_name": {"type": "string", "nullable": true, "maxLength": 1500}}, "required": ["created_at", "file_id", "file_type", "project", "user"]}, "AnonymizationAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "provider_params": {"type": "string", "description": "\nParameters specific to the provider that you want to send along the request.\n\nit should take a *provider* name as key and an object of parameters as value.\n\nExample:\n\n {\n \"deepgram\": {\n \"filler_words\": true,\n \"smart_format\": true,\n \"callback\": \"https://webhook.site/0000\"\n },\n \"assembly\": {\n \"webhook_url\": \"https://webhook.site/0000\"\n }\n }\n\nPlease refer to the documentation of each provider to see which parameters to send.\n"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}}, "required": ["providers"]}, "AnonymizationBoundingBox": {"properties": {"x_min": {"title": "X Min", "type": "integer"}, "x_max": {"title": "X Max", "type": "integer"}, "y_min": {"title": "Y Min", "type": "integer"}, "y_max": {"title": "Y Max", "type": "integer"}}, "required": ["x_min", "x_max", "y_min", "y_max"], "title": "AnonymizationBoundingBox", "type": "object"}, "AnonymizationEntity": {"description": "This model represents an entity extracted from the text.\n\nAttributes:\n offset (int): The offset of the entity in the text.\n length (int): The lenght of the entity in the text.\n category (CategoryType): The category of the entity.\n subcategory (SubCategoryType): The subcategory of the entity.\n original_label (str): The original label of the entity.\n content (str): The content of the entity.", "properties": {"offset": {"minimum": 0, "title": "Offset", "type": "integer"}, "length": {"exclusiveMinimum": true, "title": "Length", "type": "integer"}, "category": {"$ref": "#/components/schemas/CategoryType"}, "subcategory": {"$ref": "#/components/schemas/SubCategoryType"}, "original_label": {"minLength": 1, "title": "Original Label", "type": "string"}, "content": {"minLength": 1, "title": "Content", "type": "string"}, "confidence_score": {"maximum": 1.0, "minimum": 0.0, "title": "Confidence Score", "type": "integer"}}, "required": ["offset", "length", "category", "subcategory", "original_label", "content", "confidence_score"], "title": "AnonymizationEntity", "type": "object"}, "AnonymizationItem": {"properties": {"kind": {"title": "Kind", "type": "string"}, "confidence": {"title": "Confidence", "type": 
"integer"}, "bounding_boxes": {"$ref": "#/components/schemas/AnonymizationBoundingBox"}}, "required": ["kind", "confidence", "bounding_boxes"], "title": "AnonymizationItem", "type": "object"}, "AskLLMRequest": {"type": "object", "properties": {"query": {"type": "string", "minLength": 1, "description": "Enter your question or query about the data. The large language model (LLM) will provide a response."}, "llm_provider": {"type": "string", "minLength": 1, "description": "Select a provider for the large language model for processing. Leave empty for default."}, "llm_model": {"type": "string", "minLength": 1, "description": "Specify the model to use for language processing. Leave empty for default."}, "k": {"type": "integer", "minimum": 1, "default": 3, "description": "How many results chunk you want to return"}, "history": {"type": "array", "items": {"type": "object", "additionalProperties": {}, "description": "A dictionary representing a single conversation in the previous history. Must contain 'user' and 'assistant' fields."}, "default": [], "description": "A list containing all the previous conversations between the user and the chatbot AI. Each item in the list should be a dictionary with two keys: 'user' and 'assistant'."}, "chatbot_global_action": {"type": "string", "minLength": 1, "description": "A system message that helps set the behavior of the assistant."}, "filter_documents": {"type": "object", "additionalProperties": {}, "default": {}, "description": "Filter uploaded documents based on their metadata. Specify key-value pairs where the key represents the metadata field and the value is the desired metadata value. Please ensure that the provided metadata keys are available in your database."}, "min_score": {"type": "number", "format": "double", "default": 0.0, "description": "A minimum score threshold for the model to consider a chunk as a valid response. 
Higher values mean the model will be more conservative and only return chunks that are more similar to the query. Lower values mean the model will be more open to returning chunks that are less similar to the query."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 1024, "description": "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "conversation_id": {"type": "string", "format": "uuid"}, "use_reranking": {"type": "boolean", "default": false, "description": "Activate/deactivate the reranking. This is experimental."}, "top_n": {"type": "integer", "default": 3, "description": "The number of documents returned by the reranker (experimental)"}}, "required": ["query"]}, "AskYodaProjectUpdate": {"type": "object", "properties": {"ocr_provider": {"type": "string", "default": "amazon"}, "speech_to_text_provider": {"type": "string", "default": "openai"}, "llm_provider": {"type": "string", "description": "Select a default LLM provider to use in your project."}, "llm_model": {"type": "string", "description": "Select a default Model for LLM provider to use in your project"}, "chunk_size": {"type": "integer", "maximum": 10000, "minimum": 1, "nullable": true}, "chunk_separators": {"type": "array", "items": {"type": "string"}, "nullable": true}}}, "AskYourDataProjectRequest": {"type": "object", "properties": {"credential": {"type": "string", "nullable": true, "description": "The credential resource name", "maxLength": 255}, "asset": {"type": "string", "nullable": true, "description": "The asset sub_resource name", "maxLength": 255}, "ocr_provider": {"type": "string", "minLength": 1, "default": "amazon"}, 
"speech_to_text_provider": {"type": "string", "minLength": 1, "default": "openai"}, "llm_provider": {"type": "string", "minLength": 1, "description": "Select a default LLM provider to use in your project."}, "llm_model": {"type": "string", "minLength": 1, "description": "Select a default Model for LLM provider to use in your project"}, "chunk_size": {"type": "integer", "maximum": 10000, "minimum": 1, "nullable": true}, "chunk_separators": {"type": "array", "items": {"type": "string"}, "nullable": true}, "project_name": {"type": "string", "minLength": 1, "description": "Project name"}, "collection_name": {"type": "string", "minLength": 1, "description": "Database Collection Name"}, "db_provider": {"allOf": [{"$ref": "#/components/schemas/DbProviderEnum"}], "default": "qdrant", "description": "Database Provider\n\n* `qdrant` - qdrant\n* `supabase` - supabase"}, "embeddings_provider": {"allOf": [{"$ref": "#/components/schemas/EmbeddingsProviderEnum"}], "description": "Select an embedding provider to use in your search database. 
Leave empty for default.\n\n* `openai` - openai\n* `cohere` - cohere\n* `google` - google\n* `mistral` - mistral\n* `jina` - jina"}}, "required": ["collection_name", "embeddings_provider", "project_name"]}, "AssetCreate": {"type": "object", "properties": {"sub_resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AssetCreateRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AssetList": {"type": "object", "properties": {"sub_resource": {"type": "string", "maxLength": 255}, "data": {"type": "string", "format": "byte", "readOnly": true}}, "required": ["data", "sub_resource"]}, "AssetListRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}}, "required": ["sub_resource"]}, "AssetUpdate": {"type": "object", "properties": {"sub_resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AssetUpdateRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AsyncJobList": {"type": "object", "properties": {"providers": {"type": "string"}, "nb": {"type": "integer"}, "nb_ok": {"type": "integer"}, "public_id": {"type": "string", "format": "uuid"}, "state": {"$ref": "#/components/schemas/StateEnum"}, "created_at": {"type": "string", "format": "date-time"}}, "required": ["created_at", "nb", "nb_ok", "providers", "public_id", "state"]}, "AsyncOcrRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}}, "required": ["providers"]}, "AsyncVideoAnalysisRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mov|mp4|avi)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "AutomlClassificationListProjectsResponse": {"properties": {"projects": {"items": {"$ref": "#/components/schemas/AutomlClassificationProject"}, "title": "Projects", "type": "array"}}, "required": ["projects"], "title": "AutomlClassificationListProjectsResponse", "type": "object"}, "AutomlClassificationPredictRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "project_id": {"type": "string", "minLength": 1, "description": "The id of the project", "maxLength": 250}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["project_id", "providers"]}, "AutomlClassificationProject": {"properties": {"project_id": {"format": "uuid", "title": "Project Id", "type": "string"}, "name": {"title": "Name", "type": "string"}, "provider": {"title": "Provider", "type": "string"}}, "required": ["project_id", "name", "provider"], "title": "AutomlClassificationProject", "type": "object"}, "AutomlClassificationTrainRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}}, "required": ["project_id", "providers"]}, "AutomlClassificationUploadDataRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}, "label": {"type": "string", "minLength": 1, "description": "Label of the image", "maxLength": 200}, "type_of_data": {"allOf": [{"$ref": "#/components/schemas/TypeOfDataEnum"}], "description": "Type of data : TRAINING or TEST\n\n* `TRAINING` - TRAINING\n* `TEST` - TEST"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["label", "project_id", "providers", "type_of_data"]}, "BadRequest": {"type": "object", "properties": {"error": {"$ref": "#/components/schemas/NestedBadRequest"}}, "required": ["error"]}, "BatchLaunchFailedRequest": {"properties": {"name": {"description": "Request name, if any were given", "title": "Name", "type": "string"}, "public_id": {"description": "Request ID", "title": "Public Id", "type": "integer"}, "body": {"description": "Parameters passed to the request", "title": "Body", "type": "object"}, "errors": {"description": "Error received from the request validator", "title": "Errors", "type": "object"}}, "required": ["name", "public_id", "body", "errors"], "title": "BatchLaunchFailedRequest", "type": "object"}, "BatchLaunchResponse": {"properties": {"job_id": {"description": "Job ID/name", "title": "Job Id", "type": "string"}, "nb_launched": {"description": "Number of successfully launched requests", "title": "Nb Launched", "type": "integer"}, "nb_failed": {"description": "Number of failed_requests", "title": "Nb Failed", "type": "integer"}, "total": {"description": "Total number of requests sent", "title": "Total", "type": "integer"}, "failed_requests": {"description": "if any requests failed, they will be shown in this list", "items": {"$ref": "#/components/schemas/BatchLaunchFailedRequest"}, "title": "Failed Requests", "type": "array"}}, "required": ["job_id", "nb_launched", "nb_failed", "total", "failed_requests"], "title": "BatchLaunchResponse", "type": "object"}, "BatchList": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 1023}, "status": {"$ref": "#/components/schemas/Status889Enum"}, "feature": {"type": "string", "readOnly": true}, "subfeature": {"type": "string", 
"readOnly": true}, "total_requests": {"type": "integer", "readOnly": true}, "nb_processing": {"type": "integer", "readOnly": true}, "nb_succeeded": {"type": "integer", "readOnly": true}, "nb_failed": {"type": "integer", "readOnly": true}, "get_response_url": {"type": "string", "readOnly": true}}, "required": ["feature", "get_response_url", "nb_failed", "nb_processing", "nb_succeeded", "subfeature", "total_requests"]}, "BatchRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "requests": {"type": "array", "items": {"type": "object", "additionalProperties": {}}}}, "required": ["requests"]}, "BatchResponseRequest": {"type": "object", "properties": {"public_id": {"type": "integer", "readOnly": true}, "status": {"$ref": "#/components/schemas/Status889Enum"}, "name": {"type": "string", "nullable": true, "maxLength": 1023}, "errors": {"nullable": true}, "response": {"nullable": true}}, "required": ["public_id"]}, "BlankEnum": {"enum": [""]}, "BoundingBox": {"description": "Bounding box of a word in the image\n\nAttributes:\n left (float): Left coordinate of the bounding box\n top (float): Top coordinate of the bounding box\n width (float): Width of the bounding box\n height (float): Height of the bounding box\n text (str): Text detected in the bounding box\n\nConstructor:\n from_json (classmethod): Create a new instance of BoundingBox from a JSON object\n from_normalized_vertices (classmethod): Create a new instance of BoundingBox from normalized 
vertices\n unknown (classmethod): Return an invalid bounding_box with all fields filled with `-1`", "properties": {"left": {"description": "Left coordinate of the bounding box", "title": "Left", "type": "integer"}, "top": {"description": "Top coordinate of the bounding box", "title": "Top", "type": "integer"}, "width": {"description": "Width of the bounding box", "title": "Width", "type": "integer"}, "height": {"description": "Height of the bounding box", "title": "Height", "type": "integer"}}, "required": ["left", "top", "width", "height"], "title": "BoundingBox", "type": "object"}, "Bounding_box": {"properties": {"text": {"title": "Text", "type": "string"}, "left": {"title": "Left", "type": "integer"}, "top": {"title": "Top", "type": "integer"}, "width": {"title": "Width", "type": "integer"}, "height": {"title": "Height", "type": "integer"}}, "required": ["text", "left", "top", "width", "height"], "title": "Bounding_box", "type": "object"}, "BoundixBoxOCRTable": {"properties": {"left": {"title": "Left", "type": "integer"}, "top": {"title": "Top", "type": "integer"}, "width": {"title": "Width", "type": "integer"}, "height": {"title": "Height", "type": "integer"}}, "required": ["left", "top", "width", "height"], "title": "BoundixBoxOCRTable", "type": "object"}, "CategoryType": {"description": "This enum is used to categorize the explicit content extracted from the text", "enum": ["Toxic", "Content", "Sexual", "Violence", "DrugAndAlcohol", "Finance", "HateAndExtremism", "Safe", "Other"], "title": "CategoryType", "type": "string"}, "Cell": {"properties": {"text": {"title": "Text", "type": "string"}, "row_index": {"title": "Row Index", "type": "integer"}, "col_index": {"title": "Col Index", "type": "integer"}, "row_span": {"title": "Row Span", "type": "integer"}, "col_span": {"title": "Col Span", "type": "integer"}, "confidence": {"title": "Confidence", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/BoundixBoxOCRTable"}, "is_header": {"default": 
false, "title": "Is Header", "type": "boolean"}}, "required": ["text", "row_index", "col_index", "row_span", "col_span", "confidence", "bounding_box"], "title": "Cell", "type": "object"}, "ChatAvailableToolsRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The name of your tool/function"}, "description": {"type": "string"}, "parameters": {"description": "The tool's parameters are specified using a JSON Schema object. Detailed format documentation is available in the [JSON Schema reference](https://json-schema.org/understanding-json-schema/).\n\n**Make sure to well describe each parameter for best results.**\n\n\nExample for a weather tool:\n\n {\n \"type\": \"object\",\n \"properties\": {\n \"location\": {\n \"type\": \"string\"\n \"description\": \"The geographical location for which weather data is requested.\"\n },\n \"unit\": {\n \"type\": \"string\", \"enum\": [\"Celsius\", \"Fahrenheit\"]\n \"description\": \"The unit of measurement for temperature.\"\n }\n },\n \"required\": [\"location\"]\n }\n "}}}, "ChatCompletionChoice": {"properties": {"index": {"description": "The index of this completion choice", "title": "Index", "type": "integer"}, "message": {"allOf": [{"$ref": "#/components/schemas/ChatMessage"}], "description": "The chat completion message"}, "finish_reason": {"description": "The reason the completion finished: 'stop', 'length', 'tool_calls', 'content_filter', or 'function_call'", "title": "Finish Reason", "type": "string"}}, "required": ["index", "message", "finish_reason"], "title": "ChatCompletionChoice", "type": "object"}, "ChatCompletionUsage": {"properties": {"completion_tokens_details": {"allOf": [{"$ref": "#/components/schemas/UsageTokensDetails"}], "description": "Number of tokens in the generated completion"}, "prompt_tokens_details": {"allOf": [{"$ref": "#/components/schemas/UsageTokensDetails"}], "description": "Number of tokens in the prompt"}, "total_tokens": {"description": 
"Total number of tokens used (prompt + completion)", "title": "Total Tokens", "type": "integer"}}, "required": ["completion_tokens_details", "prompt_tokens_details", "total_tokens"], "title": "ChatCompletionUsage", "type": "object"}, "ChatMessage": {"properties": {"role": {"allOf": [{"$ref": "#/components/schemas/ChatRole"}], "description": "The role of the message author"}, "content": {"default": null, "description": "The content of the message", "title": "Content", "type": "string"}, "name": {"default": null, "description": "The name of the author of this message", "title": "Name", "type": "string"}, "function_call": {"default": null, "description": "The function call information", "title": "Function Call", "type": "object"}, "tool_calls": {"default": null, "description": "The tool call information", "items": {"type": "object"}, "title": "Tool Calls", "type": "array"}}, "required": ["role"], "title": "ChatMessage", "type": "object"}, "ChatMessageContent": {"properties": {"media_url": {"default": null, "title": "Media Url", "type": "string"}, "media_base64": {"default": null, "title": "Media Base64", "type": "string"}, "text": {"default": null, "title": "Text", "type": "string"}, "media_type": {"default": null, "title": "Media Type", "type": "string"}}, "title": "ChatMessageContent", "type": "object"}, "ChatMessageDataClass": {"properties": {"role": {"title": "Role", "type": "string"}, "content": {"items": {"$ref": "#/components/schemas/ChatMessage"}, "title": "Content", "type": "array"}}, "required": ["role"], "title": "ChatMessageDataClass", "type": "object"}, "ChatMessageRequest": {"type": "object", "properties": {"role": {"type": "string", "minLength": 1}, "message": {}, "tools": {"type": "array", "items": {"$ref": "#/components/schemas/ChatAvailableToolsRequest"}, "nullable": true}, "tool_calls": {"type": "array", "items": {"$ref": "#/components/schemas/ChatToolCallsRequest"}, "nullable": true}}, "required": ["message", "role"]}, "ChatRole": {"enum": 
["system", "user", "assistant", "function", "tool"], "title": "ChatRole", "type": "string"}, "ChatToolCallsRequest": {"type": "object", "properties": {"id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "arguments": {"type": "string", "minLength": 1}}, "required": ["arguments", "id", "name"]}, "ChatToolResultRequest": {"type": "object", "properties": {"id": {"type": "string", "minLength": 1, "description": "the id of the `tool_call` used to generate result"}, "result": {"type": "string", "minLength": 1, "description": "the result of your function"}}, "required": ["id", "result"]}, "ContentNSFW": {"properties": {"timestamp": {"title": "Timestamp", "type": "integer"}, "confidence": {"title": "Confidence", "type": "integer"}, "category": {"title": "Category", "type": "string"}}, "required": ["timestamp", "confidence", "category"], "title": "ContentNSFW", "type": "object"}, "Conversation": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "name": {"type": "string", "nullable": true, "maxLength": 255}}, "required": ["id"]}, "ConversationDetail": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "name": {"type": "string", "nullable": true, "maxLength": 255}, "messages": {"type": "array", "items": {"$ref": "#/components/schemas/Message"}, "readOnly": true}}, "required": ["id", "messages"]}, "ConversationDetailRequest": {"type": "object", "properties": {"name": {"type": "string", "nullable": true, "maxLength": 255}}}, "ConversationRequest": {"type": "object", "properties": {"name": {"type": "string", "nullable": true, "maxLength": 255}}}, "CostMonitoringResponse": {"properties": {"response": {"items": {"$ref": "#/components/schemas/TokenData"}, "title": "Response", "type": "array"}}, "required": ["response"], "title": "CostMonitoringResponse", "type": "object"}, "Country": {"properties": {"name": {"title": "Name", "type": "string"}, "alpha2": 
{"title": "Alpha2", "type": "string"}, "alpha3": {"title": "Alpha3", "type": "string"}, "confidence": {"default": null, "title": "Confidence", "type": "integer"}}, "required": ["name", "alpha2", "alpha3"], "title": "Country", "type": "object"}, "CustomDocumentParsingAsyncBoundingBox": {"properties": {"left": {"title": "Left", "type": "integer"}, "top": {"title": "Top", "type": "integer"}, "width": {"title": "Width", "type": "integer"}, "height": {"title": "Height", "type": "integer"}}, "required": ["left", "top", "width", "height"], "title": "CustomDocumentParsingAsyncBoundingBox", "type": "object"}, "CustomDocumentParsingAsyncItem": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "value": {"title": "Value", "type": "string"}, "query": {"title": "Query", "type": "string"}, "bounding_box": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncBoundingBox"}, "page": {"title": "Page", "type": "integer"}}, "required": ["confidence", "value", "query", "bounding_box", "page"], "title": "CustomDocumentParsingAsyncItem", "type": "object"}, "CustomDocumentParsingAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "queries": {"type": "string", "minLength": 1, "description": "Your queries need to be a list of dict containing the questions you want answered and the page to look for the information in : '[{'query':'your query','pages':'your pages'},{'query':'your query','pages':'your pages'}]'"}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers", "queries"]}, "CustomTokenUpdate": {"type": "object", "properties": {"name": {"type": "string", "readOnly": true, "description": "The token name"}, "token_type": {"allOf": [{"$ref": "#/components/schemas/TokenTypeEnum"}], "readOnly": true}, "balance": {"type": "number", "format": "double", "maximum": 100000, "minimum": -100000, "exclusiveMaximum": true, "exclusiveMinimum": true, "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Weither to use the balance field or not."}}, "required": ["name", "token_type"]}, "CustomTokensCreate": {"type": "object", "properties": {"name": {"type": "string", "description": "The token name", "maxLength": 200}, "token_type": {"$ref": "#/components/schemas/TokenTypeEnum"}, "balance": {"type": "string", "format": "decimal", "pattern": "^-?\\d{0,5}(?:\\.\\d{0,9})?$", "description": "Optional remaining credits balance for this 
Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Weither to use the balance field or not."}}, "required": ["name"]}, "CustomTokensCreateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The token name", "maxLength": 200}, "token_type": {"$ref": "#/components/schemas/TokenTypeEnum"}, "balance": {"type": "string", "format": "decimal", "pattern": "^-?\\d{0,5}(?:\\.\\d{0,9})?$", "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Weither to use the balance field or not."}}, "required": ["name"]}, "CustomTokensList": {"type": "object", "properties": {"name": {"type": "string", "description": "The token name", "maxLength": 200}, "token": {"type": "string", "nullable": true, "maxLength": 2000}, "token_type": {"$ref": "#/components/schemas/TokenTypeEnum"}, "balance": {"type": "number", "format": "double", "maximum": 100000, "minimum": -100000, "exclusiveMaximum": true, "exclusiveMinimum": true, "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "active_balance": {"type": "boolean", "description": "Weither to use the balance field or not."}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}}, "required": ["name"]}, "DataTypeEnum": {"enum": ["pdf", "audio", "csv", "xml"], "type": "string", "description": "* `pdf` - pdf\n* `audio` - audio\n* `csv` - csv\n* `xml` - xml"}, "DbProviderEnum": {"enum": ["qdrant", "supabase"], "type": "string", "description": "* `qdrant` - 
qdrant\n* `supabase` - supabase"}, "DetailPerFrame": {"properties": {"position": {"title": "Position", "type": "integer"}, "score": {"maximum": 1.0, "minimum": 0.0, "title": "Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/PredictionB20Enum"}], "title": "Prediction"}}, "required": ["position", "score", "prediction"], "title": "DetailPerFrame", "type": "object"}, "DetailTypeEnum": {"enum": ["resolution", "document_type"], "type": "string", "description": "* `resolution` - Resolution\n* `document_type` - Document Type"}, "Details": {"properties": {"total_cost": {"title": "Total Cost", "type": "integer"}, "details": {"title": "Details", "type": "integer"}, "cost_per_provider": {"additionalProperties": {"type": "integer"}, "title": "Cost Per Provider", "type": "object"}}, "required": ["total_cost", "details", "cost_per_provider"], "title": "Details", "type": "object"}, "DocumentTypeEnum": {"enum": ["auto-detect", "invoice", "receipt"], "type": "string", "description": "* `auto-detect` - auto-detect\n* `invoice` - invoice\n* `receipt` - receipt"}, "EmbeddingDataClass": {"properties": {"embedding": {"items": {"type": "integer"}, "title": "Embedding", "type": "array"}}, "required": ["embedding"], "title": "EmbeddingDataClass", "type": "object"}, "EmbeddingsProviderEnum": {"enum": ["openai", "cohere", "google", "mistral", "jina"], "type": "string", "description": "* `openai` - openai\n* `cohere` - cohere\n* `google` - google\n* `mistral` - mistral\n* `jina` - jina"}, "EmotionItem": {"description": "This class is used in EmotionAnalysisDataClass to list emotion analysed.\nArgs:\n - emotion (EmotionEnum): emotion of the text\n - emotion_score (float): score of the emotion", "properties": {"emotion": {"title": "Emotion", "type": "string"}, "emotion_score": {"maximum": 100.0, "minimum": 0.0, "title": "Emotion Score", "type": "integer"}}, "required": ["emotion", "emotion_score"], "title": "EmotionItem", "type": "object"}, "Entity": 
{"properties": {"type": {"description": "Recognized Entity type", "title": "Type", "type": "string"}, "text": {"description": "Text corresponding to the entity", "title": "Text", "type": "string"}, "sentiment": {"allOf": [{"$ref": "#/components/schemas/EntitySentimentEnum"}], "title": "Sentiment"}, "begin_offset": {"default": null, "title": "Begin Offset", "type": "integer"}, "end_offset": {"default": null, "title": "End Offset", "type": "integer"}}, "required": ["type", "text", "sentiment"], "title": "Entity", "type": "object"}, "EntitySentimentEnum": {"enum": ["Positive", "Negative", "Neutral", "Mixed"], "type": "string"}, "Error": {"type": "object", "properties": {"error": {"$ref": "#/components/schemas/NestedError"}}, "required": ["error"]}, "ExecutionContentCreate": {"type": "object", "properties": {"status": {"$ref": "#/components/schemas/ExecutionContentCreateStatusEnum"}, "results": {}, "last_node_executed": {"type": "string"}}, "required": ["last_node_executed", "results", "status"]}, "ExecutionContentCreateStatusEnum": {"enum": ["success", "failed", "processing"], "type": "string", "description": "* `success` - Success\n* `failed` - Failed\n* `processing` - Processing"}, "ExecutionExampleSuccessCreate": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. 
Use if your webhook receiver has a request size limit."}, "id": {"type": "string"}, "content": {"$ref": "#/components/schemas/ExecutionContentCreate"}, "created_at": {"type": "string", "format": "date-time"}, "updated_at": {"type": "string", "format": "date-time"}}, "required": ["content", "created_at", "id", "updated_at"]}, "ExecutionFakeCreateRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "JSON data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. 
Use if your webhook receiver has a request size limit."}, "my_invoice": {"type": "string", "minLength": 1, "default": "https://example.com/public/invoice_123.pdf", "description": "An example of an input parameter specified in the input node"}}}, "ExecutionList": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "status": {"type": "string", "nullable": true, "maxLength": 30}, "created_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}}, "required": ["created_at", "id", "updated_at"]}, "ExplicitItem": {"properties": {"label": {"description": "", "title": "Label", "type": "string"}, "likelihood": {"description": "", "title": "Likelihood", "type": "integer"}, "likelihood_score": {"description": "", "title": "Likelihood Score", "type": "integer"}, "category": {"allOf": [{"$ref": "#/components/schemas/CategoryType"}], "description": "The category of the detected content. Possible values include: 'Toxic', 'Content', 'Sexual', 'Violence', 'DrugAndAlcohol', 'Finance', 'HateAndExtremism', 'Safe', 'Other'."}, "subcategory": {"description": "The subcategory of content. 
Possible values:\n\nToxic Subcategories:\n- Insult\n- Obscene\n- Derogatory\n- Profanity\n- Threat\n- Toxic\n\nContent Subcategories:\n- MiddleFinger\n- PublicSafety\n- Health\n- Explicit\n- QRCode\n- Medical\n- Politics\n- Legal\n\nSexual Subcategories:\n- SexualActivity\n- SexualSituations\n- Nudity\n- PartialNudity\n- Suggestive\n- AdultToys\n- RevealingClothes\n- Sexual\n\nViolence Subcategories:\n- GraphicViolenceOrGore\n- PhysicalViolence\n- WeaponViolence\n- Violence\n\nDrug and Alcohol Subcategories:\n- DrugProducts\n- DrugUse\n- Tobacco\n- Smoking\n- Alcohol\n- Drinking\n\nFinance Subcategories:\n- Gambling\n- Finance\n- MoneyContent\n\nHate and Extremism Subcategories:\n- Hate\n- Harassment\n- Threatening\n- Extremist\n- Racy\n\nSafe Subcategories:\n- Safe\n- NotSafe\n\nOther Subcategories:\n- Spoof\n- Religion\n- Offensive\n- Other", "title": "Subcategory", "type": "string"}}, "required": ["label", "likelihood", "likelihood_score", "category", "subcategory"], "title": "ExplicitItem", "type": "object"}, "ExtractedTopic": {"properties": {"category": {"title": "Category", "type": "string"}, "importance": {"title": "Importance", "type": "integer"}}, "required": ["category", "importance"], "title": "ExtractedTopic", "type": "object"}, "FaceAccessories": {"properties": {"sunglasses": {"title": "Sunglasses", "type": "integer"}, "reading_glasses": {"title": "Reading Glasses", "type": "integer"}, "swimming_goggles": {"title": "Swimming Goggles", "type": "integer"}, "face_mask": {"title": "Face Mask", "type": "integer"}, "eyeglasses": {"title": "Eyeglasses", "type": "integer"}, "headwear": {"title": "Headwear", "type": "integer"}}, "required": ["sunglasses", "reading_glasses", "swimming_goggles", "face_mask", "eyeglasses", "headwear"], "title": "FaceAccessories", "type": "object"}, "FaceAttributes": {"properties": {"headwear": {"title": "Headwear", "type": "integer"}, "frontal_gaze": {"title": "Frontal Gaze", "type": "integer"}, "eyes_visible": {"title": "Eyes 
Visible", "type": "integer"}, "glasses": {"title": "Glasses", "type": "integer"}, "mouth_open": {"title": "Mouth Open", "type": "integer"}, "smiling": {"title": "Smiling", "type": "integer"}, "brightness": {"title": "Brightness", "type": "integer"}, "sharpness": {"title": "Sharpness", "type": "integer"}, "pose": {"$ref": "#/components/schemas/VideoFacePoses"}}, "required": ["headwear", "frontal_gaze", "eyes_visible", "glasses", "mouth_open", "smiling", "brightness", "sharpness", "pose"], "title": "FaceAttributes", "type": "object"}, "FaceBoundingBox": {"properties": {"x_min": {"title": "X Min", "type": "integer"}, "x_max": {"title": "X Max", "type": "integer"}, "y_min": {"title": "Y Min", "type": "integer"}, "y_max": {"title": "Y Max", "type": "integer"}}, "required": ["x_min", "x_max", "y_min", "y_max"], "title": "FaceBoundingBox", "type": "object"}, "FaceCompareBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "FaceCompareBoundingBox", "type": "object"}, "FaceEmotions": {"properties": {"joy": {"title": "Joy", "type": "integer"}, "sorrow": {"title": "Sorrow", "type": "integer"}, "anger": {"title": "Anger", "type": "integer"}, "surprise": {"title": "Surprise", "type": "integer"}, "disgust": {"title": "Disgust", "type": "integer"}, "fear": {"title": "Fear", "type": "integer"}, "confusion": {"title": "Confusion", "type": "integer"}, "calm": {"title": "Calm", "type": "integer"}, "unknown": {"title": "Unknown", "type": "integer"}, "neutral": {"title": "Neutral", "type": "integer"}, "contempt": {"title": "Contempt", "type": "integer"}}, "required": ["joy", "sorrow", "anger", "surprise", "disgust", "fear", "confusion", "calm", "unknown", "neutral", "contempt"], "title": "FaceEmotions", "type": "object"}, "FaceFacialHair": {"properties": 
{"moustache": {"title": "Moustache", "type": "integer"}, "beard": {"title": "Beard", "type": "integer"}, "sideburns": {"title": "Sideburns", "type": "integer"}}, "required": ["moustache", "beard", "sideburns"], "title": "FaceFacialHair", "type": "object"}, "FaceFeatures": {"properties": {"eyes_open": {"title": "Eyes Open", "type": "integer"}, "smile": {"title": "Smile", "type": "integer"}, "mouth_open": {"title": "Mouth Open", "type": "integer"}}, "required": ["eyes_open", "smile", "mouth_open"], "title": "FaceFeatures", "type": "object"}, "FaceHair": {"properties": {"hair_color": {"items": {"$ref": "#/components/schemas/FaceHairColor"}, "title": "Hair Color", "type": "array"}, "bald": {"title": "Bald", "type": "integer"}, "invisible": {"title": "Invisible", "type": "boolean"}}, "required": ["bald", "invisible"], "title": "FaceHair", "type": "object"}, "FaceHairColor": {"properties": {"color": {"title": "Color", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["color", "confidence"], "title": "FaceHairColor", "type": "object"}, "FaceItem": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "landmarks": {"$ref": "#/components/schemas/FaceLandmarks"}, "emotions": {"$ref": "#/components/schemas/FaceEmotions"}, "poses": {"$ref": "#/components/schemas/FacePoses"}, "age": {"title": "Age", "type": "integer"}, "gender": {"title": "Gender", "type": "string"}, "bounding_box": {"$ref": "#/components/schemas/FaceBoundingBox"}, "hair": {"$ref": "#/components/schemas/FaceHair"}, "facial_hair": {"$ref": "#/components/schemas/FaceFacialHair"}, "quality": {"$ref": "#/components/schemas/FaceQuality"}, "makeup": {"$ref": "#/components/schemas/FaceMakeup"}, "accessories": {"$ref": "#/components/schemas/FaceAccessories"}, "occlusions": {"$ref": "#/components/schemas/FaceOcclusions"}, "features": {"$ref": "#/components/schemas/FaceFeatures"}}, "required": ["confidence", "landmarks", "emotions", "poses", "age", "gender", 
"bounding_box", "hair", "facial_hair", "quality", "makeup", "accessories", "occlusions", "features"], "title": "FaceItem", "type": "object"}, "FaceLandmarks": {"properties": {"left_eye": {"items": {"type": "integer"}, "title": "Left Eye", "type": "array"}, "left_eye_top": {"items": {"type": "integer"}, "title": "Left Eye Top", "type": "array"}, "left_eye_right": {"items": {"type": "integer"}, "title": "Left Eye Right", "type": "array"}, "left_eye_bottom": {"items": {"type": "integer"}, "title": "Left Eye Bottom", "type": "array"}, "left_eye_left": {"items": {"type": "integer"}, "title": "Left Eye Left", "type": "array"}, "right_eye": {"items": {"type": "integer"}, "title": "Right Eye", "type": "array"}, "right_eye_top": {"items": {"type": "integer"}, "title": "Right Eye Top", "type": "array"}, "right_eye_right": {"items": {"type": "integer"}, "title": "Right Eye Right", "type": "array"}, "right_eye_bottom": {"items": {"type": "integer"}, "title": "Right Eye Bottom", "type": "array"}, "right_eye_left": {"items": {"type": "integer"}, "title": "Right Eye Left", "type": "array"}, "left_eyebrow_left": {"items": {"type": "integer"}, "title": "Left Eyebrow Left", "type": "array"}, "left_eyebrow_right": {"items": {"type": "integer"}, "title": "Left Eyebrow Right", "type": "array"}, "left_eyebrow_top": {"items": {"type": "integer"}, "title": "Left Eyebrow Top", "type": "array"}, "right_eyebrow_left": {"items": {"type": "integer"}, "title": "Right Eyebrow Left", "type": "array"}, "right_eyebrow_right": {"items": {"type": "integer"}, "title": "Right Eyebrow Right", "type": "array"}, "left_pupil": {"items": {"type": "integer"}, "title": "Left Pupil", "type": "array"}, "right_pupil": {"items": {"type": "integer"}, "title": "Right Pupil", "type": "array"}, "nose_tip": {"items": {"type": "integer"}, "title": "Nose Tip", "type": "array"}, "nose_bottom_right": {"items": {"type": "integer"}, "title": "Nose Bottom Right", "type": "array"}, "nose_bottom_left": {"items": {"type": 
"integer"}, "title": "Nose Bottom Left", "type": "array"}, "mouth_left": {"items": {"type": "integer"}, "title": "Mouth Left", "type": "array"}, "mouth_right": {"items": {"type": "integer"}, "title": "Mouth Right", "type": "array"}, "right_eyebrow_top": {"items": {"type": "integer"}, "title": "Right Eyebrow Top", "type": "array"}, "midpoint_between_eyes": {"items": {"type": "integer"}, "title": "Midpoint Between Eyes", "type": "array"}, "nose_bottom_center": {"items": {"type": "integer"}, "title": "Nose Bottom Center", "type": "array"}, "nose_left_alar_out_tip": {"items": {"type": "integer"}, "title": "Nose Left Alar Out Tip", "type": "array"}, "nose_left_alar_top": {"items": {"type": "integer"}, "title": "Nose Left Alar Top", "type": "array"}, "nose_right_alar_out_tip": {"items": {"type": "integer"}, "title": "Nose Right Alar Out Tip", "type": "array"}, "nose_right_alar_top": {"items": {"type": "integer"}, "title": "Nose Right Alar Top", "type": "array"}, "nose_root_left": {"items": {"type": "integer"}, "title": "Nose Root Left", "type": "array"}, "nose_root_right": {"items": {"type": "integer"}, "title": "Nose Root Right", "type": "array"}, "upper_lip": {"items": {"type": "integer"}, "title": "Upper Lip", "type": "array"}, "under_lip": {"items": {"type": "integer"}, "title": "Under Lip", "type": "array"}, "under_lip_bottom": {"items": {"type": "integer"}, "title": "Under Lip Bottom", "type": "array"}, "under_lip_top": {"items": {"type": "integer"}, "title": "Under Lip Top", "type": "array"}, "upper_lip_bottom": {"items": {"type": "integer"}, "title": "Upper Lip Bottom", "type": "array"}, "upper_lip_top": {"items": {"type": "integer"}, "title": "Upper Lip Top", "type": "array"}, "mouth_center": {"items": {"type": "integer"}, "title": "Mouth Center", "type": "array"}, "mouth_top": {"items": {"type": "integer"}, "title": "Mouth Top", "type": "array"}, "mouth_bottom": {"items": {"type": "integer"}, "title": "Mouth Bottom", "type": "array"}, "left_ear_tragion": 
{"items": {"type": "integer"}, "title": "Left Ear Tragion", "type": "array"}, "right_ear_tragion": {"items": {"type": "integer"}, "title": "Right Ear Tragion", "type": "array"}, "forehead_glabella": {"items": {"type": "integer"}, "title": "Forehead Glabella", "type": "array"}, "chin_gnathion": {"items": {"type": "integer"}, "title": "Chin Gnathion", "type": "array"}, "chin_left_gonion": {"items": {"type": "integer"}, "title": "Chin Left Gonion", "type": "array"}, "chin_right_gonion": {"items": {"type": "integer"}, "title": "Chin Right Gonion", "type": "array"}, "upper_jawline_left": {"items": {"type": "integer"}, "title": "Upper Jawline Left", "type": "array"}, "mid_jawline_left": {"items": {"type": "integer"}, "title": "Mid Jawline Left", "type": "array"}, "mid_jawline_right": {"items": {"type": "integer"}, "title": "Mid Jawline Right", "type": "array"}, "upper_jawline_right": {"items": {"type": "integer"}, "title": "Upper Jawline Right", "type": "array"}, "left_cheek_center": {"items": {"type": "integer"}, "title": "Left Cheek Center", "type": "array"}, "right_cheek_center": {"items": {"type": "integer"}, "title": "Right Cheek Center", "type": "array"}}, "title": "FaceLandmarks", "type": "object"}, "FaceMakeup": {"properties": {"eye_make": {"title": "Eye Make", "type": "boolean"}, "lip_make": {"title": "Lip Make", "type": "boolean"}}, "required": ["eye_make", "lip_make"], "title": "FaceMakeup", "type": "object"}, "FaceMatch": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/FaceCompareBoundingBox"}}, "required": ["confidence", "bounding_box"], "title": "FaceMatch", "type": "object"}, "FaceOcclusions": {"properties": {"eye_occluded": {"title": "Eye Occluded", "type": "boolean"}, "forehead_occluded": {"title": "Forehead Occluded", "type": "boolean"}, "mouth_occluded": {"title": "Mouth Occluded", "type": "boolean"}}, "required": ["eye_occluded", "forehead_occluded", "mouth_occluded"], "title": 
"FaceOcclusions", "type": "object"}, "FacePoses": {"properties": {"pitch": {"title": "Pitch", "type": "integer"}, "roll": {"title": "Roll", "type": "integer"}, "yaw": {"title": "Yaw", "type": "integer"}}, "required": ["pitch", "roll", "yaw"], "title": "FacePoses", "type": "object"}, "FaceQuality": {"properties": {"noise": {"title": "Noise", "type": "integer"}, "exposure": {"title": "Exposure", "type": "integer"}, "blur": {"title": "Blur", "type": "integer"}, "brightness": {"title": "Brightness", "type": "integer"}, "sharpness": {"title": "Sharpness", "type": "integer"}}, "required": ["noise", "exposure", "blur", "brightness", "sharpness"], "title": "FaceQuality", "type": "object"}, "FallbackTypeEnum": {"enum": ["rerun", "continue"], "type": "string", "description": "* `rerun` - Rerun\n* `continue` - Continue"}, "Feature": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 255}, "fullname": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true, "maxLength": 1000}}, "required": ["name"]}, "FieldError": {"type": "object", "properties": {"": {"type": "array", "items": {"type": "string"}}}, "required": [""]}, "FinalStatusEnum": {"enum": ["sucess", "fail"], "type": "string"}, "FinancialBankInformation": {"properties": {"iban": {"default": null, "description": "International Bank Account Number.", "title": "Iban", "type": "string"}, "swift": {"default": null, "description": "Society for Worldwide Interbank Financial Telecommunication code.", "title": "Swift", "type": "string"}, "bsb": {"default": null, "description": "Bank State Branch code (Australia).", "title": "Bsb", "type": "string"}, "sort_code": {"default": null, "description": "Sort code for UK banks.", "title": "Sort Code", "type": "string"}, "account_number": {"default": null, "description": "Bank account number.", "title": "Account Number", "type": "string"}, "routing_number": {"default": null, "description": "Routing number for banks 
in the United States.", "title": "Routing Number", "type": "string"}, "bic": {"default": null, "description": "Bank Identifier Code.", "title": "Bic", "type": "string"}}, "title": "FinancialBankInformation", "type": "object"}, "FinancialBarcode": {"properties": {"value": {"title": "Value", "type": "string"}, "type": {"title": "Type", "type": "string"}}, "required": ["value", "type"], "title": "FinancialBarcode", "type": "object"}, "FinancialCustomerInformation": {"properties": {"name": {"default": null, "description": "The name of the invoiced customer.", "title": "Name", "type": "string"}, "id_reference": {"default": null, "description": "Unique reference ID for the customer.", "title": "Id Reference", "type": "string"}, "mailling_address": {"default": null, "description": "The mailing address of the customer.", "title": "Mailling Address", "type": "string"}, "billing_address": {"default": null, "description": "The explicit billing address for the customer.", "title": "Billing Address", "type": "string"}, "shipping_address": {"default": null, "description": "The shipping address for the customer.", "title": "Shipping Address", "type": "string"}, "service_address": {"default": null, "description": "The service address associated with the customer.", "title": "Service Address", "type": "string"}, "remittance_address": {"default": null, "description": "The address to which payments should be remitted.", "title": "Remittance Address", "type": "string"}, "email": {"default": null, "description": "The email address of the customer.", "title": "Email", "type": "string"}, "phone": {"default": null, "description": "The phone number associated with the customer.", "title": "Phone", "type": "string"}, "vat_number": {"default": null, "description": "VAT (Value Added Tax) number of the customer.", "title": "Vat Number", "type": "string"}, "abn_number": {"default": null, "description": "ABN (Australian Business Number) of the customer.", "title": "Abn Number", "type": 
"string"}, "gst_number": {"default": null, "description": "GST (Goods and Services Tax) number of the customer.", "title": "Gst Number", "type": "string"}, "pan_number": {"default": null, "description": "PAN (Permanent Account Number) of the customer.", "title": "Pan Number", "type": "string"}, "business_number": {"default": null, "description": "Business registration number of the customer.", "title": "Business Number", "type": "string"}, "siret_number": {"default": null, "description": "SIRET (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises et de leurs \u00c9tablissements) number of the customer.", "title": "Siret Number", "type": "string"}, "siren_number": {"default": null, "description": "SIREN (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises) number of the customer.", "title": "Siren Number", "type": "string"}, "customer_number": {"default": null, "description": "Customer identification number.", "title": "Customer Number", "type": "string"}, "coc_number": {"default": null, "description": "Chamber of Commerce registration number.", "title": "Coc Number", "type": "string"}, "fiscal_number": {"default": null, "description": "Fiscal identification number of the customer.", "title": "Fiscal Number", "type": "string"}, "registration_number": {"default": null, "description": "Official registration number of the customer.", "title": "Registration Number", "type": "string"}, "tax_id": {"default": null, "description": "Tax identification number of the customer.", "title": "Tax Id", "type": "string"}, "website": {"default": null, "description": "The website associated with the customer.", "title": "Website", "type": "string"}, "remit_to_name": {"default": null, "description": "The name associated with the customer's remittance address.", "title": "Remit To Name", "type": "string"}, "city": {"default": null, "description": "The city associated with the customer's address.", "title": "City", "type": "string"}, "country": {"default": null, 
"description": "The country associated with the customer's address.", "title": "Country", "type": "string"}, "house_number": {"default": null, "description": "The house number associated with the customer's address.", "title": "House Number", "type": "string"}, "province": {"default": null, "description": "The province associated with the customer's address.", "title": "Province", "type": "string"}, "street_name": {"default": null, "description": "The street name associated with the customer's address.", "title": "Street Name", "type": "string"}, "zip_code": {"default": null, "description": "The ZIP code associated with the customer's address.", "title": "Zip Code", "type": "string"}, "municipality": {"default": null, "description": "The municipality associated with the customer's address.", "title": "Municipality", "type": "string"}}, "title": "FinancialCustomerInformation", "type": "object"}, "FinancialDocumentInformation": {"properties": {"invoice_receipt_id": {"default": null, "description": "Identifier for the invoice.", "title": "Invoice Receipt Id", "type": "string"}, "purchase_order": {"default": null, "description": "Purchase order related to the document.", "title": "Purchase Order", "type": "string"}, "invoice_date": {"default": null, "description": "Date of the invoice.", "title": "Invoice Date", "type": "string"}, "time": {"default": null, "description": "Time associated with the document.", "title": "Time", "type": "string"}, "invoice_due_date": {"default": null, "description": "Due date for the invoice.", "title": "Invoice Due Date", "type": "string"}, "service_start_date": {"default": null, "description": "Start date of the service associated with the document.", "title": "Service Start Date", "type": "string"}, "service_end_date": {"default": null, "description": "End date of the service associated with the document.", "title": "Service End Date", "type": "string"}, "reference": {"default": null, "description": "Reference number associated with the 
document.", "title": "Reference", "type": "string"}, "biller_code": {"default": null, "description": "Biller code associated with the document.", "title": "Biller Code", "type": "string"}, "order_date": {"default": null, "description": "Date of the order associated with the document.", "title": "Order Date", "type": "string"}, "tracking_number": {"default": null, "description": "Tracking number associated with the document.", "title": "Tracking Number", "type": "string"}, "barcodes": {"description": "List of barcodes associated with the document.", "items": {"$ref": "#/components/schemas/FinancialBarcode"}, "title": "Barcodes", "type": "array"}}, "title": "FinancialDocumentInformation", "type": "object"}, "FinancialDocumentMetadata": {"properties": {"document_index": {"default": null, "description": "Index of the detected document.", "title": "Document Index", "type": "integer"}, "document_page_number": {"default": null, "description": "Page number within the document.", "title": "Document Page Number", "type": "integer"}, "document_type": {"default": null, "description": "Type or category of the document.", "title": "Document Type", "type": "string"}}, "title": "FinancialDocumentMetadata", "type": "object"}, "FinancialLineItem": {"properties": {"tax": {"default": null, "description": "Tax amount for the line item.", "title": "Tax", "type": "integer"}, "amount_line": {"default": null, "description": "Total amount for the line item.", "title": "Amount Line", "type": "integer"}, "description": {"default": null, "description": "Description of the line item.", "title": "Description", "type": "string"}, "quantity": {"default": null, "description": "Quantity of units for the line item.", "title": "Quantity", "type": "integer"}, "unit_price": {"default": null, "description": "Unit price for each unit in the line item.", "title": "Unit Price", "type": "integer"}, "unit_type": {"default": null, "description": "Type of unit (e.g., hours, items).", "title": "Unit Type", 
"type": "string"}, "date": {"default": null, "description": "Date associated with the line item.", "title": "Date", "type": "string"}, "product_code": {"default": null, "description": "Product code or identifier for the line item.", "title": "Product Code", "type": "string"}, "purchase_order": {"default": null, "description": "Purchase order related to the line item.", "title": "Purchase Order", "type": "string"}, "tax_rate": {"default": null, "description": "Tax rate applied to the line item.", "title": "Tax Rate", "type": "integer"}, "base_total": {"default": null, "description": "Base total amount before any discounts or taxes.", "title": "Base Total", "type": "integer"}, "sub_total": {"default": null, "description": "Subtotal amount for the line item.", "title": "Sub Total", "type": "integer"}, "discount_amount": {"default": null, "description": "Amount of discount applied to the line item.", "title": "Discount Amount", "type": "integer"}, "discount_rate": {"default": null, "description": "Rate of discount applied to the line item.", "title": "Discount Rate", "type": "integer"}, "discount_code": {"default": null, "description": "Code associated with any discount applied to the line item.", "title": "Discount Code", "type": "string"}, "order_number": {"default": null, "description": "Order number associated with the line item.", "title": "Order Number", "type": "string"}, "title": {"default": null, "description": "Title or name of the line item.", "title": "Title", "type": "string"}}, "title": "FinancialLineItem", "type": "object"}, "FinancialLocalInformation": {"properties": {"currency": {"default": null, "description": "Currency used in financial transactions.", "title": "Currency", "type": "string"}, "currency_code": {"default": null, "description": "Currency code (e.g., USD, EUR).", "title": "Currency Code", "type": "string"}, "currency_exchange_rate": {"default": null, "description": "Exchange rate for the specified currency.", "title": "Currency Exchange 
Rate", "type": "string"}, "country": {"default": null, "description": "Country associated with the local financial information.", "title": "Country", "type": "string"}, "language": {"default": null, "description": "Language used in financial transactions.", "title": "Language", "type": "string"}}, "title": "FinancialLocalInformation", "type": "object"}, "FinancialMerchantInformation": {"properties": {"name": {"default": null, "description": "Name of the merchant.", "title": "Name", "type": "string"}, "address": {"default": null, "description": "Address of the merchant.", "title": "Address", "type": "string"}, "phone": {"default": null, "description": "Phone number of the merchant.", "title": "Phone", "type": "string"}, "tax_id": {"default": null, "description": "Tax identification number of the merchant.", "title": "Tax Id", "type": "string"}, "id_reference": {"default": null, "description": "Unique reference ID for the merchant.", "title": "Id Reference", "type": "string"}, "vat_number": {"default": null, "description": "VAT (Value Added Tax) number of the merchant.", "title": "Vat Number", "type": "string"}, "abn_number": {"default": null, "description": "ABN (Australian Business Number) of the merchant.", "title": "Abn Number", "type": "string"}, "gst_number": {"default": null, "description": "GST (Goods and Services Tax) number of the merchant.", "title": "Gst Number", "type": "string"}, "business_number": {"default": null, "description": "Business registration number of the merchant.", "title": "Business Number", "type": "string"}, "siret_number": {"default": null, "description": "SIRET (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises et de leurs \u00c9tablissements) number of the merchant.", "title": "Siret Number", "type": "string"}, "siren_number": {"default": null, "description": "SIREN (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises) number of the merchant.", "title": "Siren Number", "type": "string"}, "pan_number": 
{"default": null, "description": "PAN (Permanent Account Number) of the merchant.", "title": "Pan Number", "type": "string"}, "coc_number": {"default": null, "description": "Chamber of Commerce registration number of the merchant.", "title": "Coc Number", "type": "string"}, "fiscal_number": {"default": null, "description": "Fiscal identification number of the merchant.", "title": "Fiscal Number", "type": "string"}, "email": {"default": null, "description": "Email address of the merchant.", "title": "Email", "type": "string"}, "fax": {"default": null, "description": "Fax number of the merchant.", "title": "Fax", "type": "string"}, "website": {"default": null, "description": "Website of the merchant.", "title": "Website", "type": "string"}, "registration": {"default": null, "description": "Official registration information of the merchant.", "title": "Registration", "type": "string"}, "city": {"default": null, "description": "City associated with the merchant's address.", "title": "City", "type": "string"}, "country": {"default": null, "description": "Country associated with the merchant's address.", "title": "Country", "type": "string"}, "house_number": {"default": null, "description": "House number associated with the merchant's address.", "title": "House Number", "type": "string"}, "province": {"default": null, "description": "Province associated with the merchant's address.", "title": "Province", "type": "string"}, "street_name": {"default": null, "description": "Street name associated with the merchant's address.", "title": "Street Name", "type": "string"}, "zip_code": {"default": null, "description": "ZIP code associated with the merchant's address.", "title": "Zip Code", "type": "string"}, "country_code": {"default": null, "description": "Country code associated with the merchant's location.", "title": "Country Code", "type": "string"}}, "title": "FinancialMerchantInformation", "type": "object"}, "FinancialParserObjectDataClass": {"properties": 
{"customer_information": {"$ref": "#/components/schemas/FinancialCustomerInformation"}, "merchant_information": {"$ref": "#/components/schemas/FinancialMerchantInformation"}, "payment_information": {"$ref": "#/components/schemas/FinancialPaymentInformation"}, "financial_document_information": {"$ref": "#/components/schemas/FinancialDocumentInformation"}, "local": {"$ref": "#/components/schemas/FinancialLocalInformation"}, "bank": {"$ref": "#/components/schemas/FinancialBankInformation"}, "item_lines": {"description": "List of line items associated with the document.", "items": {"$ref": "#/components/schemas/FinancialLineItem"}, "title": "Item Lines", "type": "array"}, "document_metadata": {"$ref": "#/components/schemas/FinancialDocumentMetadata"}}, "required": ["customer_information", "merchant_information", "payment_information", "financial_document_information", "local", "bank", "document_metadata"], "title": "FinancialParserObjectDataClass", "type": "object"}, "FinancialPaymentInformation": {"properties": {"amount_due": {"default": null, "description": "Amount due for payment.", "title": "Amount Due", "type": "integer"}, "amount_tip": {"default": null, "description": "Tip amount in a financial transaction.", "title": "Amount Tip", "type": "integer"}, "amount_shipping": {"default": null, "description": "Shipping cost in a financial transaction.", "title": "Amount Shipping", "type": "integer"}, "amount_change": {"default": null, "description": "Change amount in a financial transaction.", "title": "Amount Change", "type": "integer"}, "amount_paid": {"default": null, "description": "Amount already paid in a financial transaction.", "title": "Amount Paid", "type": "integer"}, "total": {"default": null, "description": "Total amount in the invoice.", "title": "Total", "type": "integer"}, "subtotal": {"default": null, "description": "Subtotal amount in a financial transaction.", "title": "Subtotal", "type": "integer"}, "total_tax": {"default": null, "description": 
"Total tax amount in a financial transaction.", "title": "Total Tax", "type": "integer"}, "tax_rate": {"default": null, "description": "Tax rate applied in a financial transaction.", "title": "Tax Rate", "type": "integer"}, "discount": {"default": null, "description": "Discount amount applied in a financial transaction.", "title": "Discount", "type": "integer"}, "gratuity": {"default": null, "description": "Gratuity amount in a financial transaction.", "title": "Gratuity", "type": "integer"}, "service_charge": {"default": null, "description": "Service charge in a financial transaction.", "title": "Service Charge", "type": "integer"}, "previous_unpaid_balance": {"default": null, "description": "Previous unpaid balance in a financial transaction.", "title": "Previous Unpaid Balance", "type": "integer"}, "prior_balance": {"default": null, "description": "Prior balance before the current financial transaction.", "title": "Prior Balance", "type": "integer"}, "payment_terms": {"default": null, "description": "Terms and conditions for payment.", "title": "Payment Terms", "type": "string"}, "payment_method": {"default": null, "description": "Payment method used in the financial transaction.", "title": "Payment Method", "type": "string"}, "payment_card_number": {"default": null, "description": "Card number used in the payment.", "title": "Payment Card Number", "type": "string"}, "payment_auth_code": {"default": null, "description": "Authorization code for the payment.", "title": "Payment Auth Code", "type": "string"}, "shipping_handling_charge": {"default": null, "description": "Charge for shipping and handling in a financial transaction.", "title": "Shipping Handling Charge", "type": "integer"}, "transaction_number": {"default": null, "description": "Unique identifier for the financial transaction.", "title": "Transaction Number", "type": "string"}, "transaction_reference": {"default": null, "description": "Reference number for the financial transaction.", "title": 
"Transaction Reference", "type": "string"}}, "title": "FinancialPaymentInformation", "type": "object"}, "GeneralSentimentEnum": {"enum": ["Positive", "Negative", "Neutral"], "type": "string"}, "GenerateRequest": {"type": "object", "properties": {"messages": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "A list containing all the conversations between the user and the assistant.\nEach item in the list should be a dictionary with two keys: 'role' and 'message'.\n\n**role**: Specifies the role of the speaker and can have the values 'user', 'system', 'assistant' or 'tool'.\nThe system role instructs the way the model should answer, e.g. 'You are a helpful assistant'. The user\nrole specifies the user query and assistant is the model's response. The tool role is for external tools that\ncan be used in the conversation.\n\n**message**: A list of dictionaries. Each dictionary in the 'message' list must contain the keys 'type' and 'content'.\n\n#### Structure\n- **type**: Specifies the type of content and can be 'image_url' or 'text'.\n- **content**: A dictionary with the actual content based on the 'type':\n - If 'type' is 'image_url', 'content' must contain 'image_url' and must not contain 'text'.\n - If 'type' is 'text', 'content' must contain 'text' and must not contain 'image_url'.\n\n#### Example\n```json\n[\n {\n \"role\": \"user\",\n \"content\": [\n {\n \"type\": \"text\",\n \"text\": \"Describe this image\"\n },\n {\n \"type\": \"image_url\",\n \"image_url\": {\n \"url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg\"\n }\n }\n ]\n }\n]\n```"}, "model": {"type": "string", "minLength": 1, "description": "The OpenAI model to use for the chat completion. \nThis field is required and specifies which language model will process the conversation. 
\n\n**Example values**: 'gpt-3.5-turbo', 'gpt-4', 'gpt-4-turbo'"}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. \n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "metadata": {"type": "array", "items": {"$ref": "#/components/schemas/MetadataRequest"}, "description": "Optional list of metadata associated with the chat request. \nCan be used to provide additional context or tracking information. \n\n**Example**:\n```json\n{\n \"metadata\": [\n {\"key\": \"conversation_id\", \"value\": \"chat_12345\"},\n {\"key\": \"source\", \"value\": \"customer_support\"}\n ]\n}\n```"}, "frequency_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Controls repetitiveness of model responses by penalizing frequent tokens. \nRanges from -2.0 to 2.0. \n\n**Values**:\n- Positive values: Reduce token repetition\n- Negative values: Encourage repetition\n- 0.0: Default behavior\n\n**Example**: 1.5 to significantly reduce repeated phrases"}, "logit_bias": {"type": "object", "additionalProperties": {"type": "number", "format": "double"}, "description": "Modify the likelihood of specific tokens appearing in the response. \nA dictionary where keys are token IDs and values are bias scores. \n\n**Example**:\n```json\n{\n \"logit_bias\": {\n \"50256\": -100, # Reduce probability of end-of-text token\n \"15\": 5 # Slightly increase probability of a specific token\n }\n}\n```"}, "logprobs": {"type": "boolean", "description": "If set to True, returns log probabilities of the most likely tokens. \nUseful for advanced token probability analysis. 
\n\n**Example**: True to get detailed token likelihood information"}, "top_logprobs": {"type": "integer", "maximum": 20, "minimum": 0, "description": "Number of top log probabilities to return with each token. \nMust be between 0 and 20. \n\n**Example**: 5 to get top 5 most likely tokens for each position"}, "max_completion_tokens": {"type": "integer", "minimum": 1, "description": "Maximum number of tokens to generate in the completion. \nMust be at least 1. \n\n**Example**: 150 to limit response to approximately 100-150 words"}, "n": {"type": "integer", "minimum": 1, "description": "Number of chat completion choices to generate.\n\n **Example**: 3 to generate multiple alternative responses"}, "modalities": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of supported input/output modalities for the chat. \n\n**Example**:\n```json\n{\n \"modalities\": [\"text\", \"image\", \"audio\"]\n}\n```"}, "prediction": {"type": "object", "additionalProperties": {}, "description": "Optional field for storing prediction-related information. \nFlexible dictionary to capture model's predictive metadata. \n\n**Example**:\n```json\n{\n \"prediction\": {\n \"confidence_score\": 0.85,\n \"top_prediction\": \"response_category\"\n }\n}\n```"}, "audio": {"type": "object", "additionalProperties": {}, "description": "Optional dictionary for audio-related parameters or metadata. \n\n**Example**:\n```json\n{\n \"audio\": {\n \"language\": \"en-US\",\n \"transcription_format\": \"srt\"\n }\n}\n```"}, "presence_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Adjusts likelihood of discussing new topics by penalizing existing tokens. \nRanges from -2.0 to 2.0. 
\n\n**Values**:\n- Positive values: Encourage more diverse topics\n- Negative values: Keep discussion more focused\n- 0.0: Default behavior\n\n**Example**: 1.0 to promote topic diversity"}, "response_format": {"type": "object", "additionalProperties": {}, "description": "Specify the desired response format for the completion. \n\n**Example**:\n```json\n{\n \"response_format\": {\n \"type\": \"json_object\",\n \"schema\": {...}\n }\n}\n```"}, "seed": {"type": "integer", "description": "Set a seed for deterministic sampling to reproduce consistent results. \n\n**Example**: 42 for a reproducible random generation process"}, "service_tier": {"allOf": [{"$ref": "#/components/schemas/ServiceTierEnum"}], "description": "Select the service tier for the API request. \n\n**Choices**:\n- 'auto': Automatically select appropriate tier\n- 'default': Use default service configuration\n\n* `auto` - auto\n* `default` - default"}, "stop": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of strings that will cause the model to stop generating. \n\n**Example**:\n```json\n{\n \"stop\": [\"\\n\", \"Human:\", \"AI:\"]\n}\n```"}, "stream": {"type": "boolean", "default": false, "description": "If True, returns tokens as they are generated in a streaming format. \nDefault is False. \n\n**Example**: True for real-time token streaming"}, "stream_options": {"type": "object", "additionalProperties": {}, "description": "Additional configuration for streaming responses. \n\n**Example**:\n```json\n{\n \"stream_options\": {\n \"include_usage\": true\n }\n}\n```"}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "description": "Controls randomness in token selection. \nRanges from 0.0 to 2.0. 
\n\n**Values**:\n- 0.0: Most deterministic, focused responses\n- 1.0: Balanced randomness\n- 2.0: Most creative, unpredictable responses\n\n**Example**: 0.7 for a good balance of creativity and focus"}, "top_p": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "description": "Nucleus sampling threshold for token selection. \nRanges from 0.0 to 1.0. Default is 1.0. \n\n**Values**:\n- 1.0: Consider all tokens\n- Lower values: More focused, deterministic sampling\n\n**Example**: 0.9 to select from top 90% most probable tokens"}, "tools": {"type": "array", "items": {}, "description": "List of tools or function definitions available to the model. \n\n**Example**:\n```json\n{\n \"tools\": [\n {\n \"type\": \"function\",\n \"function\": {\n \"name\": \"get_weather\",\n \"description\": \"Retrieve current weather\"\n }\n }\n ]\n}\n```"}, "tool_choice": {"type": "string", "minLength": 1, "description": "Specify how tools should be used in the completion. \n\n**Example values**:\n- 'auto': Model decides when to use tools\n- 'none': Disable tool usage\n- Specific tool name to always use a particular tool"}, "parallel_tool_calls": {"type": "boolean", "description": "Allow the model to make multiple tool calls in parallel. \n\n**Example**: True to enable concurrent tool invocations"}, "user": {"type": "string", "minLength": 1, "description": "Optional identifier for the end-user to help track and monitor API usage. \n\n**Example**: 'user_123456'"}, "function_call": {"type": "string", "minLength": 1, "description": "Control how function calls are handled. \n\n**Example values**:\n- 'auto': Default behavior\n- 'none': Disable function calls\n- Specific function name to force its execution"}, "functions": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "List of function definitions available to the model. 
\n\n**Example**:\n```json\n{\n \"functions\": [\n {\n \"name\": \"get_current_weather\",\n \"description\": \"Get the current weather for a location\",\n \"parameters\": {...}\n }\n ]\n}\n```"}, "thinking": {"allOf": [{"$ref": "#/components/schemas/ThinkingRequest"}], "description": "Configuration for enabling Claude's extended thinking. When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer. Requires a minimum budget of 1,024 tokens and counts towards your max_tokens limit.\n\n**Example**:\n```json\n{\n 'thinking': {\n 'type': 'enabled'\n 'budget_tokens': '1024' }\n}\n```"}, "web_search_options": {"type": "object", "additionalProperties": {}, "description": "Options for web search integration. \n **Example**:\n ```json\n web_search_options={\n \"search_context_size\": \"medium\" # Options: \"low\", \"medium\", \"high\"\n }\n ```"}, "filter_documents": {"type": "object", "additionalProperties": {}, "default": {}, "description": "Filter uploaded documents based on their metadata. Specify key-value pairs where the key represents the metadata field and the value is the desired metadata value. Please ensure that the provided metadata keys are available in your database."}, "min_score": {"type": "number", "format": "double", "default": 0.0, "description": "A minimum score threshold for the model to consider a chunk as a valid response. Higher values mean the model will be more conservative and only return chunks that are more similar to the query. Lower values mean the model will be more open to returning chunks that are less similar to the query."}, "k": {"type": "integer", "minimum": 1, "default": 3, "description": "How many results chunk you want to return"}, "max_tokens": {"type": "integer", "maximum": 16385, "minimum": 1, "default": 100, "description": "The maximum number of tokens to generate in the completion. 
The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "conversation_id": {"type": "string", "format": "uuid"}}, "required": ["messages", "model"]}, "GeneratedImageDataClass": {"properties": {"image": {"title": "Image", "type": "string"}, "image_resource_url": {"title": "Image Resource Url", "type": "string"}}, "required": ["image", "image_resource_url"], "title": "GeneratedImageDataClass", "type": "object"}, "GenerationAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "text": {"type": "string", "minLength": 1, "description": "The text prompt that describes the content and style of the video you want to generate. Be specific and detailed to guide the AI in creating your desired video."}, "file": {"type": "string", "format": "binary", "description": "Initial keyframe image for the video. Upload a single JPEG or PNG file that will serve as the starting point for video generation. This image will influence the style, composition, and initial scene of the generated video.", "pattern": "(?:png|jpg)$"}, "duration": {"type": "integer", "minimum": 1, "default": 6, "description": "Length of the generated video in seconds. Minimum duration is 1 second. If not specified, the video will default to 6 seconds."}, "fps": {"type": "integer", "minimum": 1, "default": 24, "description": "Frames per second (FPS) of the generated video. Determines the smoothness of the video motion. Default is 24 FPS, which is standard for most video content. 
Minimum value is 1 FPS."}, "dimension": {"type": "string", "minLength": 1, "default": "1280x720", "description": "Video resolution in the format 'width x height' (e.g., '1280x720'). Default resolution is 1280x720 (720p). Ensure the format is exactly 'width x height' with a lowercase 'x'."}, "seed": {"type": "integer", "default": 12, "description": "Random seed value that initializes the noise for the generation process. Use this to create consistent or unique variations of your video:\n- Range: 0 to 2,147,483,646\n- Default: 12\n- Changing the seed while keeping other parameters constant will produce a different video that still matches your prompt\n- Useful for exploring multiple creative variations of the same concept"}}, "required": ["providers", "text"]}, "ImageaiDetectionAiDetectionDataClassPredictionEnum": {"enum": ["ai-generated", "original"], "type": "string"}, "InfosIdentityParserDataClass": {"properties": {"last_name": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "given_names": {"items": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "title": "Given Names", "type": "array"}, "birth_place": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "birth_date": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "issuance_date": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "expire_date": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "document_id": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "issuing_state": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "address": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "age": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "country": {"$ref": "#/components/schemas/Country"}, "document_type": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "gender": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "image_id": {"items": {"$ref": 
"#/components/schemas/ItemIdentityParserDataClass"}, "title": "Image Id", "type": "array"}, "image_signature": {"items": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "title": "Image Signature", "type": "array"}, "mrz": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "nationality": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}}, "required": ["last_name", "birth_place", "birth_date", "issuance_date", "expire_date", "document_id", "issuing_state", "address", "age", "country", "document_type", "gender", "mrz", "nationality"], "title": "InfosIdentityParserDataClass", "type": "object"}, "InfosKeywordExtractionDataClass": {"properties": {"keyword": {"title": "Keyword", "type": "string"}, "importance": {"title": "Importance", "type": "integer"}}, "required": ["keyword", "importance"], "title": "InfosKeywordExtractionDataClass", "type": "object"}, "InfosLanguageDetectionDataClass": {"properties": {"language": {"title": "Language", "type": "string"}, "display_name": {"title": "Display Name", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["language", "display_name", "confidence"], "title": "InfosLanguageDetectionDataClass", "type": "object"}, "InfosNamedEntityRecognitionDataClass": {"properties": {"entity": {"title": "Entity", "type": "string"}, "category": {"title": "Category", "type": "string"}, "importance": {"title": "Importance", "type": "integer"}}, "required": ["entity", "category", "importance"], "title": "InfosNamedEntityRecognitionDataClass", "type": "object"}, "ItemBankCheckParsingDataClass": {"properties": {"amount": {"title": "Amount", "type": "integer"}, "amount_text": {"title": "Amount Text", "type": "string"}, "bank_address": {"title": "Bank Address", "type": "string"}, "bank_name": {"title": "Bank Name", "type": "string"}, "date": {"title": "Date", "type": "string"}, "memo": {"title": "Memo", "type": "string"}, "payer_address": {"title": "Payer Address", "type": 
"string"}, "payer_name": {"title": "Payer Name", "type": "string"}, "receiver_address": {"title": "Receiver Address", "type": "string"}, "receiver_name": {"title": "Receiver Name", "type": "string"}, "currency": {"title": "Currency", "type": "string"}, "micr": {"$ref": "#/components/schemas/MicrModel"}}, "required": ["amount", "amount_text", "bank_address", "bank_name", "date", "memo", "payer_address", "payer_name", "receiver_address", "receiver_name", "currency", "micr"], "title": "ItemBankCheckParsingDataClass", "type": "object"}, "ItemDataExtraction": {"properties": {"key": {"title": "Key", "type": "string"}, "value": {"title": "Value"}, "bounding_box": {"$ref": "#/components/schemas/BoundingBox"}, "confidence_score": {"maximum": 1.0, "minimum": 0.0, "title": "Confidence Score", "type": "integer"}}, "required": ["key", "value", "bounding_box", "confidence_score"], "title": "ItemDataExtraction", "type": "object"}, "ItemIdentityParserDataClass": {"properties": {"value": {"default": null, "title": "Value", "type": "string"}, "confidence": {"default": null, "title": "Confidence", "type": "integer"}}, "title": "ItemIdentityParserDataClass", "type": "object"}, "LandmarkItem": {"properties": {"description": {"title": "Description", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "bounding_box": {"items": {"$ref": "#/components/schemas/LandmarkVertice"}, "title": "Bounding Box", "type": "array"}, "locations": {"items": {"$ref": "#/components/schemas/LandmarkLocation"}, "title": "Locations", "type": "array"}}, "required": ["description", "confidence"], "title": "LandmarkItem", "type": "object"}, "LandmarkLatLng": {"properties": {"latitude": {"title": "Latitude", "type": "integer"}, "longitude": {"title": "Longitude", "type": "integer"}}, "required": ["latitude", "longitude"], "title": "LandmarkLatLng", "type": "object"}, "LandmarkLocation": {"properties": {"lat_lng": {"$ref": "#/components/schemas/LandmarkLatLng"}}, "required": ["lat_lng"], 
"title": "LandmarkLocation", "type": "object"}, "LandmarkVertice": {"properties": {"x": {"title": "X", "type": "integer"}, "y": {"title": "Y", "type": "integer"}}, "required": ["x", "y"], "title": "LandmarkVertice", "type": "object"}, "LandmarksVideo": {"properties": {"eye_left": {"items": {"type": "integer"}, "title": "Eye Left", "type": "array"}, "eye_right": {"items": {"type": "integer"}, "title": "Eye Right", "type": "array"}, "nose": {"items": {"type": "integer"}, "title": "Nose", "type": "array"}, "mouth_left": {"items": {"type": "integer"}, "title": "Mouth Left", "type": "array"}, "mouth_right": {"items": {"type": "integer"}, "title": "Mouth Right", "type": "array"}}, "title": "LandmarksVideo", "type": "object"}, "LaunchAsyncJobResponse": {"type": "object", "properties": {"public_id": {"type": "string", "format": "uuid"}}, "required": ["public_id"]}, "Line": {"description": "Line of a document\n\nAttributes:\n text (str): Text detected in the line\n bounding_boxes (Sequence[BoundingBox]): Bounding boxes of the words in the line\n words (Sequence[Word]): List of words of the line\n confidence (float): Confidence of the line", "properties": {"text": {"description": "Text detected in the line", "title": "Text", "type": "string"}, "words": {"description": "List of words", "items": {"$ref": "#/components/schemas/Word"}, "title": "Words", "type": "array"}, "bounding_box": {"allOf": [{"$ref": "#/components/schemas/BoundingBox"}], "default": null, "description": "Bounding box of the line, can be None"}, "confidence": {"description": "Confidence of the line", "title": "Confidence", "type": "integer"}}, "required": ["text", "confidence"], "title": "Line", "type": "object"}, "ListAsyncJobResponse": {"type": "object", "properties": {"jobs": {"type": "array", "items": {"$ref": "#/components/schemas/AsyncJobList"}}}, "required": ["jobs"]}, "ListChunkRequest": {"type": "object", "properties": {"filter_documents": {"type": "object", "additionalProperties": {}, "default": 
{}, "description": "Filter uploaded documents based on their metadata. Specify key-value pairs where the key represents the metadata field and the value is the desired metadata value. Please ensure that the provided metadata keys are available in your database."}, "limit": {"type": "integer", "minimum": 1, "default": 10, "description": "Specifies the maximum number of chunk IDs to return. Defaults to 10 if not provided."}, "with_payload": {"type": "boolean", "default": false, "description": "If set to True, includes the full payload of each chunk in the response. By default, only chunk IDs are returned."}}}, "LogoBoundingPoly": {"properties": {"vertices": {"description": "Vertices of the logos in the image", "items": {"$ref": "#/components/schemas/LogoVertice"}, "title": "Vertices", "type": "array"}}, "title": "LogoBoundingPoly", "type": "object"}, "LogoItem": {"properties": {"bounding_poly": {"allOf": [{"$ref": "#/components/schemas/LogoBoundingPoly"}], "default": {"vertices": []}}, "description": {"description": "Name of the logo", "title": "Description", "type": "string"}, "score": {"description": "Confidence score indicating how certain it is that this is a real logo.", "title": "Score", "type": "integer"}}, "required": ["description", "score"], "title": "LogoItem", "type": "object"}, "LogoTrack": {"properties": {"description": {"title": "Description", "type": "string"}, "tracking": {"items": {"$ref": "#/components/schemas/VideoLogo"}, "title": "Tracking", "type": "array"}}, "required": ["description"], "title": "LogoTrack", "type": "object"}, "LogoVertice": {"properties": {"x": {"description": "The x-coordinate of the vertex.", "title": "X", "type": "integer"}, "y": {"description": "The y-coordinate of the vertex.", "title": "Y", "type": "integer"}}, "required": ["x", "y"], "title": "LogoVertice", "type": "object"}, "LowerCloth": {"properties": {"value": {"title": "Value", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["value",
"confidence"], "title": "LowerCloth", "type": "object"}, "Message": {"type": "object", "properties": {"user_text": {"type": "string"}, "assistant_text": {"type": "string", "nullable": true}}, "required": ["user_text"]}, "MessageRequest": {"type": "object", "properties": {"user_text": {"type": "string", "minLength": 1}, "assistant_text": {"type": "string", "nullable": true}}, "required": ["user_text"]}, "MetadataRequest": {"type": "object", "properties": {"key": {"type": "string", "minLength": 1, "maxLength": 64}, "value": {"type": "string", "minLength": 1, "maxLength": 512}}, "required": ["key", "value"]}, "MicrModel": {"properties": {"raw": {"title": "Raw", "type": "string"}, "account_number": {"title": "Account Number", "type": "string"}, "routing_number": {"title": "Routing Number", "type": "string"}, "serial_number": {"title": "Serial Number", "type": "string"}, "check_number": {"title": "Check Number", "type": "string"}}, "required": ["raw", "account_number", "routing_number", "serial_number", "check_number"], "title": "MicrModel", "type": "object"}, "NestedBadRequest": {"type": "object", "properties": {"type": {"type": "string"}, "message": {"$ref": "#/components/schemas/FieldError"}}, "required": ["message", "type"]}, "NestedError": {"type": "object", "properties": {"type": {"type": "string"}, "message": {"type": "string"}}, "required": ["message", "type"]}, "NotFoundResponse": {"type": "object", "properties": {"details": {"type": "string", "default": "Not Found"}}}, "NullEnum": {"enum": [null]}, "ObjectFrame": {"properties": {"timestamp": {"title": "Timestamp", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoObjectBoundingBox"}}, "required": ["timestamp", "bounding_box"], "title": "ObjectFrame", "type": "object"}, "ObjectItem": {"properties": {"label": {"title": "Label", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "x_min": {"title": "X Min", "type": "integer"}, "x_max": {"title": "X Max", "type": 
"integer"}, "y_min": {"title": "Y Min", "type": "integer"}, "y_max": {"title": "Y Max", "type": "integer"}}, "required": ["label", "confidence", "x_min", "x_max", "y_min", "y_max"], "title": "ObjectItem", "type": "object"}, "ObjectTrack": {"properties": {"description": {"title": "Description", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "frames": {"items": {"$ref": "#/components/schemas/ObjectFrame"}, "title": "Frames", "type": "array"}}, "required": ["description", "confidence"], "title": "ObjectTrack", "type": "object"}, "OcrTablesAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "language": {"type": "string", "nullable": true, "description": "Language code of the language the document is written in (ex: fr (French), en (English), es (Spanish))"}}, "required": ["providers"]}, "OptionEnum": {"enum": ["FEMALE", "MALE"], "type": "string", "description": "* `FEMALE` - Female\n* `MALE` - Male"}, "Page": {"description": "Page of a document\n\nAttributes:\n lines (Sequence[Line]): List of lines of the page", "properties": {"lines": {"description": "List of lines", "items": {"$ref": "#/components/schemas/Line"}, "title": "Lines", "type": "array"}}, "title": "Page", "type": "object"}, "PaginatedBatchResponse": {"type": "object", "properties": {"total": {"type": "integer", "description": "Total requests made"}, "current_page": {"type": "integer", "description": "Current page number"}, "last_page": {"type": "integer"}, "per_page": {"type": "integer", "description": "Number of requests per page"}, "From": {"type": "integer"}, "to": {"type": "integer"}, "prev_page_url": {"type": "string", "format": "uri"}, "next_page_url": {"type": "string", "format": "uri"}, "requests": {"type": "array", "items": {"$ref": "#/components/schemas/BatchResponseRequest"}}, "status": {"$ref": "#/components/schemas/Status889Enum"}, "created": {"type": "string", "format": "date-time", "readOnly": true}, "updated": {"type": "string", "format": "date-time", "readOnly": true}}, "required": ["From", "created", "current_page", "last_page", "per_page", "requests", "to", "total", "updated"]}, "PaginatedExecutionListList": {"type": "object", "required": ["count", "results"], "properties": {"count": {"type": "integer"}, "next": 
{"type": "string", "nullable": true, "format": "uri"}, "previous": {"type": "string", "nullable": true, "format": "uri"}, "results": {"type": "array", "items": {"$ref": "#/components/schemas/ExecutionList"}}}}, "PaginatedPromptCreateList": {"type": "object", "required": ["count", "results"], "properties": {"count": {"type": "integer"}, "next": {"type": "string", "nullable": true, "format": "uri"}, "previous": {"type": "string", "nullable": true, "format": "uri"}, "results": {"type": "array", "items": {"$ref": "#/components/schemas/PromptCreate"}}}}, "PaginatedPromptHistoryList": {"type": "object", "required": ["count", "results"], "properties": {"count": {"type": "integer"}, "next": {"type": "string", "nullable": true, "format": "uri"}, "previous": {"type": "string", "nullable": true, "format": "uri"}, "results": {"type": "array", "items": {"$ref": "#/components/schemas/PromptHistory"}}}}, "PatchedAskYodaProjectUpdateRequest": {"type": "object", "properties": {"ocr_provider": {"type": "string", "minLength": 1, "default": "amazon"}, "speech_to_text_provider": {"type": "string", "minLength": 1, "default": "openai"}, "llm_provider": {"type": "string", "minLength": 1, "description": "Select a default LLM provider to use in your project."}, "llm_model": {"type": "string", "minLength": 1, "description": "Select a default Model for LLM provider to use in your project"}, "chunk_size": {"type": "integer", "maximum": 10000, "minimum": 1, "nullable": true}, "chunk_separators": {"type": "array", "items": {"type": "string"}, "nullable": true}}}, "PatchedAssetUpdateRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}}, "PatchedConversationDetailRequest": {"type": "object", "properties": {"name": {"type": "string", "nullable": true, "maxLength": 255}}}, "PatchedCustomTokenUpdateRequest": {"type": "object", "properties": {"balance": {"type": "number", "format": 
"double", "maximum": 100000, "minimum": -100000, "exclusiveMaximum": true, "exclusiveMinimum": true, "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Weither to use the balance field or not."}}}, "PatchedPromptHistoryRequest": {"type": "object", "properties": {"text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "PatchedPromptUpdateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. 
These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "current_version": {"type": "integer", "description": "The production version of the prompt, that is used by default when you call the prompt"}, "file_urls": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "PatchedResourceUpdateRequest": {"type": "object", "properties": {"resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "minLength": 1, "maxLength": 100}}}, "PatchedWebhookParametersRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). 
Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}}, "PersonAttributes": {"properties": {"upper_cloths": {"items": {"$ref": "#/components/schemas/UpperCloth"}, "title": "Upper Cloths", "type": "array"}, "lower_cloths": {"items": {"$ref": "#/components/schemas/LowerCloth"}, "title": "Lower Cloths", "type": "array"}}, "title": "PersonAttributes", "type": "object"}, "PersonLandmarks": {"properties": {"eye_left": {"items": {"type": "integer"}, "title": "Eye Left", "type": "array"}, "eye_right": {"items": {"type": "integer"}, "title": "Eye Right", "type": "array"}, "nose": {"items": {"type": "integer"}, "title": "Nose", "type": "array"}, "ear_left": {"items": {"type": "integer"}, "title": "Ear Left", "type": "array"}, "ear_right": {"items": {"type": "integer"}, "title": "Ear Right", "type": "array"}, "shoulder_left": {"items": {"type": "integer"}, "title": "Shoulder Left", "type": "array"}, "shoulder_right": {"items": {"type": "integer"}, "title": "Shoulder Right", "type": "array"}, "elbow_left": {"items": {"type": "integer"}, "title": "Elbow Left", "type": "array"}, "elbow_right": {"items": {"type": "integer"}, "title": "Elbow Right", "type": "array"}, "wrist_left": {"items": {"type": "integer"}, "title": "Wrist Left", "type": "array"}, "wrist_right": {"items": {"type": "integer"}, "title": "Wrist Right", "type": "array"}, "hip_left": {"items": {"type": "integer"}, "title": "Hip Left", "type": "array"}, "hip_right": {"items": {"type": "integer"}, "title": "Hip Right", "type": "array"}, "knee_left": {"items": {"type": "integer"}, "title": "Knee Left", "type": "array"}, "knee_right": {"items": {"type": "integer"}, "title": "Knee Right", "type": "array"}, "ankle_left": {"items": {"type": "integer"}, "title": "Ankle Left", "type": "array"}, "ankle_right": {"items": {"type": 
"integer"}, "title": "Ankle Right", "type": "array"}, "mouth_left": {"items": {"type": "integer"}, "title": "Mouth Left", "type": "array"}, "mouth_right": {"items": {"type": "integer"}, "title": "Mouth Right", "type": "array"}}, "title": "PersonLandmarks", "type": "object"}, "PersonTracking": {"properties": {"offset": {"title": "Offset", "type": "integer"}, "attributes": {"$ref": "#/components/schemas/PersonAttributes"}, "landmarks": {"$ref": "#/components/schemas/PersonLandmarks"}, "poses": {"$ref": "#/components/schemas/VideoPersonPoses"}, "quality": {"$ref": "#/components/schemas/VideoPersonQuality"}, "bounding_box": {"$ref": "#/components/schemas/VideoTrackingBoundingBox"}}, "required": ["offset", "bounding_box"], "title": "PersonTracking", "type": "object"}, "PlagiaDetectionCandidate": {"properties": {"url": {"title": "Url", "type": "string"}, "plagia_score": {"title": "Plagia Score", "type": "integer"}, "prediction": {"title": "Prediction", "type": "string"}, "plagiarized_text": {"title": "Plagiarized Text", "type": "string"}}, "required": ["url", "plagia_score", "prediction", "plagiarized_text"], "title": "PlagiaDetectionCandidate", "type": "object"}, "PlagiaDetectionItem": {"properties": {"text": {"title": "Text", "type": "string"}, "candidates": {"items": {"$ref": "#/components/schemas/PlagiaDetectionCandidate"}, "title": "Candidates", "type": "array"}}, "required": ["text"], "title": "PlagiaDetectionItem", "type": "object"}, "PredictionB20Enum": {"enum": ["deepfake", "original"], "type": "string"}, "PriceUnitTypeEnum": {"enum": ["file", "image", "page", "size", "request", "seconde", "minute", "free", "hour", "char", "token", "exec_time", "unknown"], "type": "string", "description": "* `file` - File\n* `image` - Image\n* `page` - Page\n* `size` - Size\n* `request` - Request\n* `seconde` - Second\n* `minute` - Minute\n* `free` - Free\n* `hour` - Hour\n* `char` - Characters\n* `token` - Token\n* `exec_time` - Execution Time\n* `unknown` - Unknown"}, 
"PricingSerialzier": {"type": "object", "properties": {"model_name": {"type": "string", "description": "Model name, default to 'default' if no models to chose from", "maxLength": 255}, "price": {"type": "string", "format": "decimal", "pattern": "^-?\\d{0,6}(?:\\.\\d{0,9})?$"}, "price_unit_quantity": {"type": "integer", "maximum": 2147483647, "minimum": 0}, "min_price_quantity": {"type": "integer", "maximum": 2147483647, "minimum": 0, "nullable": true}, "price_unit_type": {"$ref": "#/components/schemas/PriceUnitTypeEnum"}, "detail_type": {"nullable": true, "description": "(Optional) type of extra value, MUST be the same name as the feature parameter name. eg: resolution\n\n* `resolution` - Resolution\n* `document_type` - Document Type", "oneOf": [{"$ref": "#/components/schemas/DetailTypeEnum"}, {"$ref": "#/components/schemas/BlankEnum"}, {"$ref": "#/components/schemas/NullEnum"}]}, "detail_value": {"type": "string", "nullable": true, "description": "(Optional) extra value for detailed pricing, eg: 250x250 for resolution", "maxLength": 255}, "get_detail_type_display": {"type": "string", "readOnly": true}, "is_post_call": {"type": "boolean"}}, "required": ["get_detail_type_display"]}, "ProjectTypeEnum": {"enum": ["AskYoDa", "Translathor", "X-Merge"], "type": "string", "description": "* `AskYoDa` - Askyoda\n* `Translathor` - Translathor\n* `X-Merge` - X Merge"}, "PromptCallRequest": {"type": "object", "properties": {"model": {"type": "string", "nullable": true, "minLength": 1, "description": "Optional model override (e.g., 'openai/gpt-4o'). If not provided, the model specified in the prompt will be used."}, "prompt_context": {"type": "object", "additionalProperties": {}, "nullable": true, "default": {}, "description": "Variables to inject into the prompt template.\n#### Example\n```json\n{\n \"my_variable\": \"some_value\",\n}\n```"}, "params": {"type": "object", "additionalProperties": {}, "nullable": true, "default": {}, "description": "Optional params override. 
If not provided, the default params in the prompt will be used\n\nParams that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models. Overrides the file urls set on the prompt."}}}, "PromptCreate": {"type": "object", "properties": {"id": {"type": "integer", "readOnly": true}, "name": {"type": "string", "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "current_version": {"type": "integer", "readOnly": true, "nullable": true}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true}, "history_count": {"type": "integer", "readOnly": true}}, "required": ["created_at", "current_version", "history_count", "id", "name", "updated_at"]}, "PromptCreateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "writeOnly": true, "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "writeOnly": true, "minLength": 1, "description": "The model used to call the prompt. E.g. 
openai/gpt-4o"}, "params": {"writeOnly": true, "description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}, "writeOnly": true, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "writeOnly": true, "minLength": 1, "description": "Specify a system prompt for the LLM"}}, "required": ["model", "name", "text"]}, "PromptDataClass": {"properties": {"text": {"title": "Text", "type": "string"}}, "required": ["text"], "title": "PromptDataClass", "type": "object"}, "PromptHistory": {"type": "object", "properties": {"id": {"type": "integer", "readOnly": true}, "version": {"type": "integer", "readOnly": true}, "text": {"type": "string", "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "name": {"type": "string", "readOnly": true}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "maxLength": 1000}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. 
Files are not supported by all models."}, "system_prompt": {"type": "string", "description": "Specify a system prompt for the LLM"}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true}}, "required": ["created_at", "id", "name", "updated_at", "version"]}, "PromptHistoryRequest": {"type": "object", "properties": {"text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "PromptUpdate": {"type": "object", "properties": {"id": {"type": "integer", "readOnly": true}, "name": {"type": "string", "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. 
These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "current_version": {"type": "integer", "description": "The production version of the prompt, that is used by default when you call the prompt"}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true}, "file_urls": {"type": "array", "items": {"type": "string"}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "nullable": true, "description": "Specify a system prompt for the LLM"}}, "required": ["created_at", "id", "updated_at"]}, "PromptUpdateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. 
See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "current_version": {"type": "integer", "description": "The production version of the prompt, that is used by default when you call the prompt"}, "file_urls": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "Provider": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 255}, "fullname": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true, "maxLength": 1000}}, "required": ["name"]}, "ProviderSubfeature": {"type": "object", "properties": {"name": {"type": "string", "readOnly": true}, "version": {"type": "string", "readOnly": true}, "pricings": {"type": "array", "items": {"$ref": "#/components/schemas/PricingSerialzier"}}, "is_working": {"type": "boolean"}, "description_title": {"type": "string", "nullable": true, "maxLength": 100}, "description_content": {"type": "string", "nullable": true, "maxLength": 1000}, "provider": {"$ref": "#/components/schemas/Provider"}, "feature": {"$ref": "#/components/schemas/Feature"}, "subfeature": {"$ref": "#/components/schemas/Subfeature"}, "constraints": {"type": "object", "additionalProperties": {}, "readOnly": true}, "models": {"type": "object", "additionalProperties": {}, "readOnly": true}, "tts_models": {"type": "object", "additionalProperties": {}, "readOnly": true}, "llm_details": {"type": "object", "additionalProperties": {}, "readOnly": true}, "languages": {"type": "array", "items": {"type": "object", "properties": {"language_name": {"type": "string"}, "language_code": {"type": "string"}}, "required": ["language_code", 
"language_name"]}, "readOnly": true}, "phase": {"type": "string", "readOnly": true}, "regions": {"type": "array", "items": {"type": "string", "description": "2-letter ISO 3166-1 alpha-2 country code (e.g., 'us', 'eu', 'fr')"}, "readOnly": true}}, "required": ["constraints", "feature", "languages", "llm_details", "models", "name", "phase", "pricings", "provider", "regions", "subfeature", "tts_models", "version"]}, "QuestionAnswerAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mp4|mpeg|mov|avi|x\\-flx|mpg|webm|wmv|3gpp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "maximum": 3000000, "minimum": 1, "description": "The maximum number of tokens to generate in the completion. This value, combined with the token count of your prompt, cannot exceed the model's context length."}, "text": {"type": "string", "minLength": 1, "description": "Question about the video"}}, "required": ["providers", "text"]}, "ReasoningEffortEnum": {"enum": ["low", "medium", "high"], "type": "string", "description": "**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "RepresentationEnum": {"enum": ["document", "query", "symetric"], "type": "string", "description": "* `document` - document\n* `query` - query\n* `symetric` - symetric"}, "ResourceCreate": {"type": "object", "properties": {"resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "maxLength": 100}}, "required": ["data", "provider", "resource", "type"]}, "ResourceCreateRequest": {"type": "object", "properties": {"resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": 
"string", "minLength": 1, "maxLength": 100}}, "required": ["data", "provider", "resource", "type"]}, "ResourceList": {"type": "object", "properties": {"resource": {"type": "string", "maxLength": 255}, "data": {"type": "string", "format": "byte", "readOnly": true, "nullable": true}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "maxLength": 100}, "assets": {"type": "array", "items": {"$ref": "#/components/schemas/AssetList"}}}, "required": ["assets", "data", "provider", "resource", "type"]}, "ResourceUpdate": {"type": "object", "properties": {"resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "maxLength": 100}, "assets": {"type": "array", "items": {"$ref": "#/components/schemas/AssetList"}, "readOnly": true}}, "required": ["assets", "data", "provider", "resource", "type"]}, "ResourceUpdateRequest": {"type": "object", "properties": {"resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "minLength": 1, "maxLength": 100}}, "required": ["data", "provider", "resource", "type"]}, "ResumeEducation": {"properties": {"total_years_education": {"title": "Total Years Education", "type": "integer"}, "entries": {"items": {"$ref": "#/components/schemas/ResumeEducationEntry"}, "title": "Entries", "type": "array"}}, "required": ["total_years_education"], "title": "ResumeEducation", "type": "object"}, "ResumeEducationEntry": {"properties": {"title": {"title": "Title", "type": "string"}, "start_date": {"title": "Start Date", "type": "string"}, "end_date": {"title": "End Date", "type": "string"}, "location": {"$ref": "#/components/schemas/ResumeLocation"}, "establishment": {"title": "Establishment", "type": "string"}, "description": {"title": "Description", 
"type": "string"}, "gpa": {"title": "Gpa", "type": "string"}, "accreditation": {"title": "Accreditation", "type": "string"}}, "required": ["title", "start_date", "end_date", "location", "establishment", "description", "gpa", "accreditation"], "title": "ResumeEducationEntry", "type": "object"}, "ResumeExtractedData": {"properties": {"personal_infos": {"$ref": "#/components/schemas/ResumePersonalInfo"}, "education": {"$ref": "#/components/schemas/ResumeEducation"}, "work_experience": {"$ref": "#/components/schemas/ResumeWorkExp"}, "languages": {"items": {"$ref": "#/components/schemas/ResumeLang"}, "title": "Languages", "type": "array"}, "skills": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Skills", "type": "array"}, "certifications": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Certifications", "type": "array"}, "courses": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Courses", "type": "array"}, "publications": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Publications", "type": "array"}, "interests": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Interests", "type": "array"}}, "required": ["personal_infos", "education", "work_experience"], "title": "ResumeExtractedData", "type": "object"}, "ResumeLang": {"properties": {"name": {"title": "Name", "type": "string"}, "code": {"title": "Code", "type": "string"}}, "required": ["name", "code"], "title": "ResumeLang", "type": "object"}, "ResumeLocation": {"properties": {"formatted_location": {"title": "Formatted Location", "type": "string"}, "postal_code": {"title": "Postal Code", "type": "string"}, "region": {"title": "Region", "type": "string"}, "country": {"title": "Country", "type": "string"}, "country_code": {"title": "Country Code", "type": "string"}, "raw_input_location": {"title": "Raw Input Location", "type": "string"}, "street": {"title": "Street", "type": "string"}, "street_number": {"title": "Street Number", 
"type": "string"}, "appartment_number": {"title": "Appartment Number", "type": "string"}, "city": {"title": "City", "type": "string"}}, "required": ["formatted_location", "postal_code", "region", "country", "country_code", "raw_input_location", "street", "street_number", "appartment_number", "city"], "title": "ResumeLocation", "type": "object"}, "ResumePersonalInfo": {"properties": {"name": {"$ref": "#/components/schemas/ResumePersonalName"}, "address": {"$ref": "#/components/schemas/ResumeLocation"}, "self_summary": {"title": "Self Summary", "type": "string"}, "objective": {"title": "Objective", "type": "string"}, "date_of_birth": {"title": "Date Of Birth", "type": "string"}, "place_of_birth": {"title": "Place Of Birth", "type": "string"}, "phones": {"items": {"type": "string"}, "title": "Phones", "type": "array"}, "mails": {"items": {"type": "string"}, "title": "Mails", "type": "array"}, "urls": {"items": {"type": "string"}, "title": "Urls", "type": "array"}, "fax": {"items": {"type": "string"}, "title": "Fax", "type": "array"}, "current_profession": {"title": "Current Profession", "type": "string"}, "gender": {"title": "Gender", "type": "string"}, "nationality": {"title": "Nationality", "type": "string"}, "martial_status": {"title": "Martial Status", "type": "string"}, "current_salary": {"title": "Current Salary", "type": "string"}, "availability": {"default": null, "title": "Availability", "type": "string"}}, "required": ["name", "address", "self_summary", "objective", "date_of_birth", "place_of_birth", "current_profession", "gender", "nationality", "martial_status", "current_salary"], "title": "ResumePersonalInfo", "type": "object"}, "ResumePersonalName": {"properties": {"first_name": {"title": "First Name", "type": "string"}, "last_name": {"title": "Last Name", "type": "string"}, "raw_name": {"title": "Raw Name", "type": "string"}, "middle": {"title": "Middle", "type": "string"}, "title": {"title": "Title", "type": "string"}, "prefix": {"title": "Prefix", 
"type": "string"}, "sufix": {"title": "Sufix", "type": "string"}}, "required": ["first_name", "last_name", "raw_name", "middle", "title", "prefix", "sufix"], "title": "ResumePersonalName", "type": "object"}, "ResumeSkill": {"properties": {"name": {"title": "Name", "type": "string"}, "type": {"title": "Type", "type": "string"}}, "required": ["name", "type"], "title": "ResumeSkill", "type": "object"}, "ResumeWorkExp": {"properties": {"total_years_experience": {"title": "Total Years Experience", "type": "string"}, "entries": {"items": {"$ref": "#/components/schemas/ResumeWorkExpEntry"}, "title": "Entries", "type": "array"}}, "required": ["total_years_experience"], "title": "ResumeWorkExp", "type": "object"}, "ResumeWorkExpEntry": {"properties": {"title": {"title": "Title", "type": "string"}, "start_date": {"title": "Start Date", "type": "string"}, "end_date": {"title": "End Date", "type": "string"}, "company": {"title": "Company", "type": "string"}, "location": {"$ref": "#/components/schemas/ResumeLocation"}, "description": {"title": "Description", "type": "string"}, "type": {"default": null, "title": "Type", "type": "string"}, "industry": {"title": "Industry", "type": "string"}}, "required": ["title", "start_date", "end_date", "company", "location", "description", "industry"], "title": "ResumeWorkExpEntry", "type": "object"}, "Row": {"properties": {"cells": {"items": {"$ref": "#/components/schemas/Cell"}, "title": "Cells", "type": "array"}}, "title": "Row", "type": "object"}, "SegmentSentimentAnalysisDataClass": {"description": "This class is used in SentimentAnalysisDataClass to describe each segment analyzed.\n\nArgs:\n - segment (str): The segment analyzed\n - sentiment (Literal['Positve', 'Negative', 'Neutral']) (Case is ignore): Sentiment of segment\n - sentiment_rate (float between 0 and 1): Rate of sentiment", "properties": {"segment": {"title": "Segment", "type": "string"}, "sentiment": {"allOf": [{"$ref": "#/components/schemas/SentimentEbfEnum"}], "title": 
"Sentiment"}, "sentiment_rate": {"maximum": 1.0, "minimum": 0.0, "title": "Sentiment Rate", "type": "integer"}}, "required": ["segment", "sentiment", "sentiment_rate"], "title": "SegmentSentimentAnalysisDataClass", "type": "object"}, "SentimentEbfEnum": {"enum": ["Positive", "Negative", "Neutral"], "type": "string"}, "ServiceTierEnum": {"enum": ["auto", "default"], "type": "string", "description": "**Choices**:\n- 'auto': Automatically select appropriate tier\n- 'default': Use default service configuration\n\n* `auto` - auto\n* `default` - default"}, "ShotFrame": {"properties": {"startTimeOffset": {"title": "Starttimeoffset", "type": "integer"}, "endTimeOffset": {"title": "Endtimeoffset", "type": "integer"}}, "required": ["startTimeOffset", "endTimeOffset"], "title": "ShotFrame", "type": "object"}, "SpeechDiarization": {"properties": {"total_speakers": {"title": "Total Speakers", "type": "integer"}, "entries": {"items": {"$ref": "#/components/schemas/SpeechDiarizationEntry"}, "title": "Entries", "type": "array"}, "error_message": {"default": null, "title": "Error Message", "type": "string"}}, "required": ["total_speakers"], "title": "SpeechDiarization", "type": "object"}, "SpeechDiarizationEntry": {"properties": {"segment": {"title": "Segment", "type": "string"}, "start_time": {"title": "Start Time", "type": "string"}, "end_time": {"title": "End Time", "type": "string"}, "speaker": {"title": "Speaker", "type": "integer"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["segment", "start_time", "end_time", "speaker", "confidence"], "title": "SpeechDiarizationEntry", "type": "object"}, "SpeechToTextAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "provider_params": {"type": "string", "description": "\nParameters specific to the provider that you want to send along the request.\n\nit should take a *provider* name as key and an object of parameters as value.\n\nExample:\n\n {\n \"deepgram\": {\n \"filler_words\": true,\n \"smart_format\": true,\n \"callback\": \"https://webhook.site/0000\"\n },\n \"assembly\": {\n \"webhook_url\": \"https://webhook.site/0000\"\n }\n }\n\nPlease refer to the documentation of each provider to see which parameters to send.\n"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "language": {"type": "string", "nullable": true, "description": "Language code expected (ex: en, fr)"}, "speakers": {"type": "integer", "nullable": true, "default": 2, "description": "Number of speakers in the file audio"}, "profanity_filter": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather or not the service will filter profanity and replace inappropriate words with a series of asterisks"}, "custom_vocabulary": {"type": "string", "default": "", "description": "List of words or composed words to be detected by the speech to text engine. (Ex: Word, Mike, Draw, Los Angeles,...)"}, "convert_to_wav": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the audio/video file to wav format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "SpellCheckItem": {"description": "Represents a spell check item with suggestions.\n\nArgs:\n text (str): The text to spell check.\n type (str, optional): The type of the text.\n offset (int): The offset of the text.\n length (int): The length of the text.\n suggestions (Sequence[SuggestionItem], optional): The list of suggestions for the misspelled text.\n\nRaises:\n ValueError: If the offset or length is not positive.\n\nReturns:\n SpellCheckItem: An instance of the SpellCheckItem class.", "properties": {"text": {"title": "Text", "type": "string"}, "type": {"title": "Type", "type": "string"}, "offset": {"minimum": 0, "title": "Offset", "type": "integer"}, "length": {"minimum": 0, "title": "Length", "type": "integer"}, "suggestions": {"items": {"$ref": "#/components/schemas/SuggestionItem"}, "title": "Suggestions", "type": "array"}}, "required": ["text", "type", "offset", "length"], 
"title": "SpellCheckItem", "type": "object"}, "StateEnum": {"enum": ["finished", "failed", "Timeout error", "processing"], "type": "string", "description": "* `finished` - finished\n* `failed` - failed\n* `Timeout error` - Timeout error\n* `processing` - processing"}, "Status549Enum": {"enum": ["sucess", "fail"], "type": "string"}, "Status889Enum": {"enum": ["succeeded", "failed", "finished", "processing"], "type": "string", "description": "* `succeeded` - Status Succeeded\n* `failed` - Status Failed\n* `finished` - Status Finished\n* `processing` - Status Processing"}, "SubCategoryType": {"enum": ["CreditCard", "CardExpiry", "BankAccountNumber", "BankRoutingNumber", "SwiftCode", "TaxIdentificationNumber", "Name", "Age", "Email", "Phone", "PersonType", "Gender", "SocialSecurityNumber", "NationalIdentificationNumber", "NationalHealthService", "ResidentRegistrationNumber", "DriverLicenseNumber", "PassportNumber", "URL", "IP", "MAC", "VehicleIdentificationNumber", "LicensePlate", "VoterNumber", "AWSKeys", "AzureKeys", "Password", "CompanyName", "CompanyNumber", "BuisnessNumber", "Date", "Time", "DateTime", "Duration", "Address", "Location", "Other", "Anonymized", "Nerd", "Wsd", "Unknown"], "title": "SubCategoryType", "type": "string"}, "Subfeature": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 255}, "fullname": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true, "maxLength": 1000}}, "required": ["name"]}, "SuggestionItem": {"description": "Represents a suggestion for a misspelled word.\n\nArgs:\n suggestion (str): The suggested text.\n score (float, optional): The score of the suggested text (between 0 and 1).\n\nRaises:\n ValueError: If the score is not between 0 and 1.\n\nReturns:\n SuggestionItem: An instance of the SuggestionItem class.", "properties": {"suggestion": {"title": "Suggestion", "type": "string"}, "score": {"maximum": 1.0, "minimum": 0.0, "title": "Score", "type": 
"integer"}}, "required": ["suggestion", "score"], "title": "SuggestionItem", "type": "object"}, "Table": {"properties": {"rows": {"items": {"$ref": "#/components/schemas/Row"}, "title": "Rows", "type": "array"}, "num_rows": {"title": "Num Rows", "type": "integer"}, "num_cols": {"title": "Num Cols", "type": "integer"}}, "required": ["num_rows", "num_cols"], "title": "Table", "type": "object"}, "TextModerationItem": {"properties": {"label": {"title": "Label", "type": "string"}, "likelihood": {"title": "Likelihood", "type": "integer"}, "category": {"$ref": "#/components/schemas/CategoryType"}, "subcategory": {"$ref": "#/components/schemas/SubCategoryType"}, "likelihood_score": {"title": "Likelihood Score", "type": "integer"}}, "required": ["label", "likelihood", "category", "subcategory", "likelihood_score"], "title": "TextModerationItem", "type": "object"}, "TextToSpeechAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "default": "", "description": "Language code expected (ex: en, fr)"}, "option": {"default": "", "oneOf": [{"$ref": "#/components/schemas/OptionEnum"}, {"$ref": "#/components/schemas/BlankEnum"}]}, "rate": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking rate by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "pitch": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking pitch by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "volume": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the audio volume by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "audio_format": {"type": "string", "nullable": true, "default": "", "description": "Optional parameter to specify the audio format in which the audio will be generated. By default, audios are encoded in MP3, except for lovoai which use the wav container."}, "sampling_rate": {"type": "integer", "maximum": 200000, "minimum": 0, "nullable": true, "default": 0, "description": "Optional. The synthesis sample rate (in hertz) for this audio. 
When this is specified, the audio will be converted either to the right passed value, or to a the nearest value acceptable by the provider"}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}, "required": ["providers", "text"]}, "ThinkingRequest": {"type": "object", "properties": {"type": {"$ref": "#/components/schemas/ThinkingTypeEnum"}, "budget_tokens": {"type": "integer", "minimum": 1024}}, "required": ["budget_tokens", "type"]}, "ThinkingTypeEnum": {"enum": ["enabled", "disabled"], "type": "string", "description": "* `enabled` - enabled\n* `disabled` - disabled"}, "TokenData": {"properties": {"token": {"title": "Token", "type": "string"}, "data": {"additionalProperties": {"additionalProperties": {"$ref": "#/components/schemas/Details"}, "type": "object"}, "title": "Data", "type": "object"}}, "required": ["token", "data"], "title": "TokenData", "type": "object"}, "TokenTypeEnum": {"enum": ["sandbox_api_token", "api_token"], "type": "string", "description": "* `sandbox_api_token` - Sandbox\n* `api_token` - Back"}, "ToolCall": {"properties": {"id": {"title": "Id", "type": "string"}, "name": {"title": "Name", "type": "string"}, "arguments": {"title": "Arguments", "type": "string"}}, "required": ["id", "name", "arguments"], "title": "ToolCall", "type": "object"}, "ToolChoiceEnum": {"enum": 
["auto", "required", "none"], "type": "string", "description": "* `auto` - auto\n* `required` - required\n* `none` - none"}, "TypeDe8Enum": {"enum": ["db", "bucket", "db_vector", "ai"], "type": "string", "description": "* `db` - Db\n* `bucket` - Bucket\n* `db_vector` - Db Vector\n* `ai` - Ai"}, "TypeOfDataEnum": {"enum": ["TRAINING", "TEST"], "type": "string", "description": "* `TRAINING` - TRAINING\n* `TEST` - TEST"}, "UpperCloth": {"properties": {"value": {"title": "Value", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["value", "confidence"], "title": "UpperCloth", "type": "object"}, "UsageTokensDetails": {"properties": {"audio_tokens": {"description": "Number of audio tokens in the prompt", "title": "Audio Tokens", "type": "integer"}, "cached_tokens": {"description": "Number of cached tokens in the prompt", "title": "Cached Tokens", "type": "integer"}, "prompt_tokens": {"description": "Number of tokens in the prompt", "title": "Prompt Tokens", "type": "integer"}, "completion_tokens": {"description": "Number of tokens in the generated completion", "title": "Completion Tokens", "type": "integer"}, "total_tokens": {"description": "Total number of tokens used (prompt + completion)", "title": "Total Tokens", "type": "integer"}, "accepted_prediction_tokens": {"description": "Number of accepted tokens in the prompt", "title": "Accepted Prediction Tokens", "type": "integer"}, "reasoning_tokens": {"description": "Number of reasoning tokens in the prompt", "title": "Reasoning Tokens", "type": "integer"}, "rejected_prediction_tokens": {"description": "Number of rejected tokens in the prompt", "title": "Rejected Prediction Tokens", "type": "integer"}}, "required": ["audio_tokens", "cached_tokens", "prompt_tokens", "completion_tokens", "total_tokens", "accepted_prediction_tokens", "reasoning_tokens", "rejected_prediction_tokens"], "title": "UsageTokensDetails", "type": "object"}, "VideoBoundingBox": {"properties": {"top": 
{"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoBoundingBox", "type": "object"}, "VideoFace": {"properties": {"offset": {"title": "Offset", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoBoundingBox"}, "attributes": {"$ref": "#/components/schemas/FaceAttributes"}, "landmarks": {"$ref": "#/components/schemas/LandmarksVideo"}}, "required": ["offset", "bounding_box", "attributes", "landmarks"], "title": "VideoFace", "type": "object"}, "VideoFacePoses": {"properties": {"pitch": {"title": "Pitch", "type": "integer"}, "roll": {"title": "Roll", "type": "integer"}, "yawn": {"title": "Yawn", "type": "integer"}}, "required": ["pitch", "roll", "yawn"], "title": "VideoFacePoses", "type": "object"}, "VideoLabel": {"properties": {"name": {"title": "Name", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "timestamp": {"items": {"$ref": "#/components/schemas/VideoLabelTimeStamp"}, "title": "Timestamp", "type": "array"}, "category": {"items": {"type": "string"}, "title": "Category", "type": "array"}, "bounding_box": {"items": {"$ref": "#/components/schemas/VideoLabelBoundingBox"}, "title": "Bounding Box", "type": "array"}}, "required": ["name", "confidence"], "title": "VideoLabel", "type": "object"}, "VideoLabelBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoLabelBoundingBox", "type": "object"}, "VideoLabelTimeStamp": {"properties": {"start": {"title": "Start", "type": "integer"}, "end": {"title": "End", "type": "integer"}}, "required": ["start", "end"], "title": "VideoLabelTimeStamp", "type": "object"}, 
"VideoLogo": {"properties": {"timestamp": {"title": "Timestamp", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoLogoBoundingBox"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["timestamp", "bounding_box", "confidence"], "title": "VideoLogo", "type": "object"}, "VideoLogoBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoLogoBoundingBox", "type": "object"}, "VideoObjectBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoObjectBoundingBox", "type": "object"}, "VideoPersonPoses": {"properties": {"pitch": {"title": "Pitch", "type": "integer"}, "roll": {"title": "Roll", "type": "integer"}, "yaw": {"title": "Yaw", "type": "integer"}}, "required": ["pitch", "roll", "yaw"], "title": "VideoPersonPoses", "type": "object"}, "VideoPersonQuality": {"properties": {"brightness": {"title": "Brightness", "type": "integer"}, "sharpness": {"title": "Sharpness", "type": "integer"}}, "required": ["brightness", "sharpness"], "title": "VideoPersonQuality", "type": "object"}, "VideoText": {"properties": {"text": {"title": "Text", "type": "string"}, "frames": {"items": {"$ref": "#/components/schemas/VideoTextFrames"}, "title": "Frames", "type": "array"}}, "required": ["text"], "title": "VideoText", "type": "object"}, "VideoTextBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", 
"width"], "title": "VideoTextBoundingBox", "type": "object"}, "VideoTextFrames": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "timestamp": {"title": "Timestamp", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoTextBoundingBox"}}, "required": ["confidence", "timestamp", "bounding_box"], "title": "VideoTextFrames", "type": "object"}, "VideoTrackingBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoTrackingBoundingBox", "type": "object"}, "VideoTrackingPerson": {"properties": {"tracked": {"items": {"$ref": "#/components/schemas/PersonTracking"}, "title": "Tracked", "type": "array"}}, "title": "VideoTrackingPerson", "type": "object"}, "WebhookParameters": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}}, "WebhookParametersRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). 
After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}}, "Word": {"description": "Word of a document\n\nAttributes:\n text (str): Text detected in the word\n bounding_boxes (Sequence[BoundingBox]): Bounding boxes of the words in the word\n confidence (float): Confidence score of the word", "properties": {"text": {"description": "Text detected in the word", "title": "Text", "type": "string"}, "bounding_box": {"allOf": [{"$ref": "#/components/schemas/BoundingBox"}], "description": "Bounding boxes of the words in the word"}, "confidence": {"description": "Confidence score of the word", "title": "Confidence", "type": "integer"}}, "required": ["text", "bounding_box", "confidence"], "title": "Word", "type": "object"}, "Workflow": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "name": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true}, "content": {"type": "array", "items": {"type": "object", "additionalProperties": {}}}, "created_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}, "output_node": {"type": "string", "nullable": true, "maxLength": 200}, "is_empty": {"type": "boolean", "readOnly": true}, "template": {"type": "integer", "nullable": true}, "code": {"type": "object", "additionalProperties": {}}, "webhook": {"type": "string", "readOnly": 
true}}, "required": ["created_at", "id", "is_empty", "updated_at", "webhook"]}, "YodaAskLlmResponse": {"properties": {"result": {"title": "Result", "type": "string"}, "llm_provider": {"title": "Llm Provider", "type": "string"}, "llm_model": {"title": "Llm Model", "type": "string"}}, "required": ["result", "llm_provider", "llm_model"], "title": "YodaAskLlmResponse", "type": "object"}, "YodaCreateProjectResponse": {"properties": {"project_id": {"format": "uuid", "title": "Project Id", "type": "string"}}, "required": ["project_id"], "title": "YodaCreateProjectResponse", "type": "object"}, "YodaDeleteResponse": {"properties": {"result": {"default": "Done!", "title": "Result", "type": "string"}}, "title": "YodaDeleteResponse", "type": "object"}, "YodaInfoResponse": {"properties": {"db_provider": {"title": "Db Provider", "type": "string"}, "embeddings_provider": {"title": "Embeddings Provider", "type": "string"}, "llm_provider": {"title": "Llm Provider", "type": "string"}, "llm_model": {"title": "Llm Model", "type": "string"}, "collection_size": {"title": "Collection Size", "type": "integer"}}, "required": ["db_provider", "embeddings_provider", "llm_provider", "llm_model", "collection_size"], "title": "YodaInfoResponse", "type": "object"}, "YodaListChunksIdsResponse": {"properties": {"chunks_ids": {"items": {"type": "string"}, "title": "Chunks Ids", "type": "array"}}, "required": ["chunks_ids"], "title": "YodaListChunksIdsResponse", "type": "object"}, "YodaQueryResponse": {"properties": {"result": {"items": {"$ref": "#/components/schemas/YodaQueryResponseItem"}, "title": "Result", "type": "array"}}, "required": ["result"], "title": "YodaQueryResponse", "type": "object"}, "YodaQueryResponseItem": {"properties": {"id": {"format": "uuid", "title": "Id", "type": "string"}, "version": {"title": "Version", "type": "integer"}, "score": {"title": "Score", "type": "integer"}, "payload": {"$ref": "#/components/schemas/YodaQueryResponsePayload"}, "vector": {"title": "Vector"}}, 
"required": ["id", "version", "score", "payload", "vector"], "title": "YodaQueryResponseItem", "type": "object"}, "YodaQueryResponsePayload": {"properties": {"metadata": {"title": "Metadata", "type": "object"}, "page_content": {"title": "Page Content", "type": "string"}}, "required": ["metadata", "page_content"], "title": "YodaQueryResponsePayload", "type": "object"}, "asyncaudiospeech_to_text_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/audiospeech_to_text_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncaudiospeech_to_text_asyncResponseModel", "type": "object"}, "asyncaudiotext_to_speech_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/audiotext_to_speech_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncaudiotext_to_speech_asyncResponseModel", "type": "object"}, "asyncocranonymization_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocranonymization_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocranonymization_asyncResponseModel", "type": "object"}, "asyncocrcustom_document_parsing_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocrcustom_document_parsing_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": 
"string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocrcustom_document_parsing_asyncResponseModel", "type": "object"}, "asyncocrocr_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocrocr_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocrocr_asyncResponseModel", "type": "object"}, "asyncocrocr_tables_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocrocr_tables_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocrocr_tables_asyncResponseModel", "type": "object"}, "asyncvideodeepfake_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videodeepfake_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideodeepfake_detection_asyncResponseModel", "type": "object"}, "asyncvideoexplicit_content_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoexplicit_content_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoexplicit_content_detection_asyncResponseModel", "type": "object"}, "asyncvideoface_detection_asyncResponseModel": {"properties": {"results": {"$ref": 
"#/components/schemas/videoface_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoface_detection_asyncResponseModel", "type": "object"}, "asyncvideogeneration_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videogeneration_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideogeneration_asyncResponseModel", "type": "object"}, "asyncvideolabel_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videolabel_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideolabel_detection_asyncResponseModel", "type": "object"}, "asyncvideologo_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videologo_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideologo_detection_asyncResponseModel", "type": "object"}, "asyncvideoobject_tracking_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoobject_tracking_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": 
["results", "error", "public_id", "status"], "title": "asyncvideoobject_tracking_asyncResponseModel", "type": "object"}, "asyncvideoperson_tracking_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoperson_tracking_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoperson_tracking_asyncResponseModel", "type": "object"}, "asyncvideoquestion_answer_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoquestion_answer_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoquestion_answer_asyncResponseModel", "type": "object"}, "asyncvideoshot_change_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoshot_change_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoshot_change_detection_asyncResponseModel", "type": "object"}, "asyncvideotext_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videotext_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideotext_detection_asyncResponseModel", "type": "object"}, "audiospeech_to_text_asyncModel": {"properties": {"speechmatics": {"allOf": [{"$ref": 
"#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "assembly": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "symbl": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "voci": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "deepgram": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "voxist": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "faker": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "gladia": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}}, "title": "audiospeech_to_text_asyncModel", "type": "object"}, "audiospeech_to_text_asyncSpeechToTextAsyncDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "diarization": {"$ref": "#/components/schemas/SpeechDiarization"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the 
`show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["text", "diarization", "id", "final_status"], "title": "audiospeech_to_text_asyncSpeechToTextAsyncDataClass", "type": "object"}, "audiotext_to_speechResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "elevenlabs": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "deepgram": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "lovoai": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}}, "title": "audiotext_to_speechResponseModel", "type": "object"}, "audiotext_to_speechTextToSpeechDataClass": {"properties": {"audio": {"title": "Audio", "type": "string"}, "voice_type": {"title": "Voice Type", "type": "integer"}, "audio_resource_url": {"title": "Audio Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["audio", "voice_type", "audio_resource_url", "status"], 
"title": "audiotext_to_speechTextToSpeechDataClass", "type": "object"}, "audiotext_to_speechTextToSpeechRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "default": "", "description": "Language code expected (ex: en, fr)"}, "option": {"default": "", "oneOf": [{"$ref": "#/components/schemas/OptionEnum"}, {"$ref": "#/components/schemas/BlankEnum"}]}, "rate": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking rate by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "pitch": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking pitch by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "volume": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the audio volume by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "audio_format": {"type": "string", "nullable": true, "default": "", "description": "Optional parameter to specify the audio format in which the audio will be generated. By default, audios are encoded in MP3, except for lovoai which use the wav container."}, "sampling_rate": {"type": "integer", "maximum": 200000, "minimum": 0, "nullable": true, "default": 0, "description": "Optional. The synthesis sample rate (in hertz) for this audio. 
When this is specified, the audio will be converted either to the right passed value, or to the nearest value acceptable by the provider"}}, "required": ["providers", "text"]}, "audiotext_to_speech_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speech_asyncTextToSpeechAsyncDataClass"}], "default": null}, "lovoai": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speech_asyncTextToSpeechAsyncDataClass"}], "default": null}}, "title": "audiotext_to_speech_asyncModel", "type": "object"}, "audiotext_to_speech_asyncTextToSpeechAsyncDataClass": {"properties": {"audio": {"title": "Audio", "type": "string"}, "voice_type": {"title": "Voice Type", "type": "integer"}, "audio_resource_url": {"title": "Audio Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["audio", "voice_type", "audio_resource_url", "id", "final_status"], "title": "audiotext_to_speech_asyncTextToSpeechAsyncDataClass", "type": "object"}, "credits_serializer": {"type": "object", "properties": {"credits": {"type": "number", "format": "double"}}, "required": ["credits"]}, "imageai_detectionAiDetectionDataClass": {"properties": {"ai_score": {"maximum": 1.0, "minimum": 0.0, "title": "Ai Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/ImageaiDetectionAiDetectionDataClassPredictionEnum"}], "title": "Prediction"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your 
request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["ai_score", "prediction", "status"], "title": "imageai_detectionAiDetectionDataClass", "type": "object"}, "imageai_detectionAiDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageai_detectionResponseModel": {"properties": {"winstonai": {"allOf": [{"$ref": "#/components/schemas/imageai_detectionAiDetectionDataClass"}], "default": null}}, "title": "imageai_detectionResponseModel", "type": "object"}, "imageanonymizationAnonymizationDataClass": {"properties": {"image": {"title": "Image", "type": "string"}, "image_resource_url": {"title": "Image Resource Url", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/AnonymizationItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["image", "image_resource_url", "status"], "title": "imageanonymizationAnonymizationDataClass", "type": "object"}, "imageanonymizationResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imageanonymizationAnonymizationDataClass"}], "default": null}}, "title": "imageanonymizationResponseModel", "type": "object"}, "imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageautoml_classificationAutomlClassificationCreateProjectDataClass": {"properties": {"name": {"title": "Name", "type": "string"}, "project_id": {"title": "Project Id", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["name", "project_id", "status"], "title": "imageautoml_classificationAutomlClassificationCreateProjectDataClass", "type": "object"}, "imageautoml_classificationResponseModel": {"properties": {"nyckel": {"allOf": [{"$ref": "#/components/schemas/imageautoml_classificationAutomlClassificationCreateProjectDataClass"}], "default": null}}, "title": "imageautoml_classificationResponseModel", "type": "object"}, "imageautoml_classificationcreate_projectAutomlClassificationCreateProjectRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "name": {"type": "string", "minLength": 1, "description": "Name of your project", "maxLength": 250}}, "required": ["providers"]}, "imageautoml_classificationdelete_projectAutomlClassificationDeleteRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}}, "required": ["project_id", "providers"]}, "imagebackground_removalBackgroundRemovalDataClass": {"properties": {"image_b64": {"description": "The image in base64 format.", "title": "Image B64", "type": "string"}, "image_resource_url": {"description": "The image url.", "title": "Image Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["image_b64", "image_resource_url", "status"], "title": "imagebackground_removalBackgroundRemovalDataClass", "type": "object"}, "imagebackground_removalBackgroundRemovalRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "provider_params": {"type": "string", "default": {}, "description": "Provider specific parameters"}}, "required": ["providers"]}, "imagebackground_removalResponseModel": {"properties": {"clipdrop": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "api4ai": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "picsart": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "photoroom": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "stabilityai": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}}, "title": "imagebackground_removalResponseModel", "type": "object"}, "imagedeepfake_detectionDeepfakeDetectionDataClass": {"properties": {"deepfake_score": {"maximum": 1.0, "minimum": 0.0, "title": "Deepfake Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/PredictionB20Enum"}], "title": "Prediction"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": 
["deepfake_score", "prediction", "status"], "title": "imagedeepfake_detectionDeepfakeDetectionDataClass", "type": "object"}, "imagedeepfake_detectionResponseModel": {"properties": {"sightengine": {"allOf": [{"$ref": "#/components/schemas/imagedeepfake_detectionDeepfakeDetectionDataClass"}], "default": null}}, "title": "imagedeepfake_detectionResponseModel", "type": "object"}, "imageembeddingsEmbeddingsDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/EmbeddingDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageembeddingsEmbeddingsDataClass", "type": "object"}, "imageembeddingsEmbeddingsRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "representation": {"allOf": [{"$ref": "#/components/schemas/RepresentationEnum"}], "description": "The type of embedding representation to embed the image with\n\n* `document` - document\n* `query` - query\n* `symetric` - symetric"}}, "required": ["providers", "representation"]}, "imageembeddingsResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/imageembeddingsEmbeddingsDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageembeddingsEmbeddingsDataClass"}], "default": null}, "alephalpha": {"allOf": [{"$ref": "#/components/schemas/imageembeddingsEmbeddingsDataClass"}], "default": null}}, "title": "imageembeddingsResponseModel", "type": "object"}, "imageexplicit_contentExplicitContentDataClass": {"properties": {"nsfw_likelihood": {"description": "An integer representing the likelihood of NSFW content. Higher values indicate a higher likelihood.", "title": "Nsfw Likelihood", "type": "integer"}, "nsfw_likelihood_score": {"description": "A floating-point score representing the confidence level of the NSFW likelihood assessment. This is typically a value between 0.0 and 1.0.", "title": "Nsfw Likelihood Score", "type": "integer"}, "items": {"description": "A list of items identified as potentially explicit. 
Each item contains details of the explicit content detected.", "items": {"$ref": "#/components/schemas/ExplicitItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["nsfw_likelihood", "nsfw_likelihood_score", "status"], "title": "imageexplicit_contentExplicitContentDataClass", "type": "object"}, "imageexplicit_contentResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}}, "title": "imageexplicit_contentResponseModel", "type": "object"}, "imageface_compareFaceCompareDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/FaceMatch"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": 
[{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageface_compareFaceCompareDataClass", "type": "object"}, "imageface_compareFaceCompareRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file1": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file1_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file2": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file2_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageface_compareResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/imageface_compareFaceCompareDataClass"}], "default": null}, "facepp": {"allOf": [{"$ref": "#/components/schemas/imageface_compareFaceCompareDataClass"}], "default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/imageface_compareFaceCompareDataClass"}], "default": null}}, "title": "imageface_compareResponseModel", "type": "object"}, "imageface_detectionFaceDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/FaceItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageface_detectionFaceDetectionDataClass", "type": "object"}, "imageface_detectionFaceDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageface_detectionResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}}, "title": "imageface_detectionResponseModel", "type": "object"}, "imageface_recognitionFaceRecognitionAddFaceDataClass": {"properties": {"face_ids": {"items": {"type": "string"}, "title": "Face Ids", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["face_ids", "status"], "title": "imageface_recognitionFaceRecognitionAddFaceDataClass", "type": "object"}, "imageface_recognitionResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/imageface_recognitionFaceRecognitionAddFaceDataClass"}], "default": null}, "facepp": {"allOf": [{"$ref": "#/components/schemas/imageface_recognitionFaceRecognitionAddFaceDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageface_recognitionFaceRecognitionAddFaceDataClass"}], 
"default": null}}, "title": "imageface_recognitionResponseModel", "type": "object"}, "imageface_recognitionadd_faceFaceRecognitionAddFaceRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|bmp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageface_recognitiondelete_faceFaceRecognitionDeleteFaceRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "face_id": {"type": "string", "minLength": 1, "description": "ID of face to delete"}}, "required": ["face_id", "providers"]}, "imageface_recognitionrecognizeFaceRecognitionDetectFaceRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|bmp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imagegenerationGenerationDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/GeneratedImageDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imagegenerationGenerationDataClass", "type": "object"}, "imagegenerationGenerationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Description of the desired image(s)."}, "resolution": {"type": "string", "minLength": 1, "description": "The image resolution (ex: 512x512, 1024x1024)", "maxLength": 50}, "num_images": {"type": "integer", "maximum": 10, "minimum": 1, "default": 1, "description": "The number of images to generate. Must be between 1 and 10."}}, "required": ["providers", "resolution", "text"]}, "imagegenerationResponseModel": {"properties": {"deepai": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "minimax": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "stabilityai": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "leonardo": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "bytedance": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "replicate": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}}, "title": "imagegenerationResponseModel", "type": "object"}, "imagelandmark_detectionLandmarkDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/LandmarkItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], 
"title": "Status"}}, "required": ["status"], "title": "imagelandmark_detectionLandmarkDetectionDataClass", "type": "object"}, "imagelandmark_detectionResponseModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/imagelandmark_detectionLandmarkDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imagelandmark_detectionLandmarkDetectionDataClass"}], "default": null}}, "title": "imagelandmark_detectionResponseModel", "type": "object"}, "imagelogo_detectionLogoDetectionDataClass": {"properties": {"items": {"description": "List of the detected brands logo from the image.", "items": {"$ref": "#/components/schemas/LogoItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imagelogo_detectionLogoDetectionDataClass", "type": "object"}, "imagelogo_detectionLogoDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imagelogo_detectionResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "smartclick": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}}, "title": "imagelogo_detectionResponseModel", "type": "object"}, "imageobject_detectionObjectDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/ObjectItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageobject_detectionObjectDetectionDataClass", "type": "object"}, "imageobject_detectionObjectDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific 
models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageobject_detectionResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}}, "title": "imageobject_detectionResponseModel", "type": "object"}, "imagequestion_answerQuestionAnswerDataClass": {"properties": {"answers": {"items": {"type": "string"}, "title": "Answers", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imagequestion_answerQuestionAnswerDataClass", "type": "object"}, "imagequestion_answerQuestionAnswerRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "question": {"type": "string", "minLength": 1, "description": "Question about the image"}, "max_tokens": {"type": "integer", "maximum": 2048, "minimum": 1, "default": 1000, "description": "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length."}}, "required": ["providers"]}, "imagequestion_answerResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerDataClass"}], "default": null}, "alephalpha": {"allOf": [{"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerDataClass"}], "default": null}}, "title": "imagequestion_answerResponseModel", "type": "object"}, "imagesearchResponseModel": {"properties": {"sentisight": {"allOf": [{"$ref": "#/components/schemas/imagesearchSearchDeleteImageDataClass"}], "default": null}, "nyckel": {"allOf": [{"$ref": "#/components/schemas/imagesearchSearchDeleteImageDataClass"}], "default": null}}, "title": "imagesearchResponseModel", "type": "object"}, "imagesearchSearchDeleteImageDataClass": {"properties": {"status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to 
`true` in your request", "title": "Original Response"}}, "required": ["status"], "title": "imagesearchSearchDeleteImageDataClass", "type": "object"}, "imagesearchdelete_imageDeleteImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "image_name": {"type": "string", "minLength": 1}}, "required": ["image_name", "providers"]}, "imagesearchlaunch_similaritySearchImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "n": {"type": "integer", "minimum": 1, "default": 10, "description": "The `n` parameter specifies the number of images you want to be returned in the response. It determines the count of the most similar images to the input image that will be included in the response. By default, it is set to 10."}}, "required": ["providers"]}, "imagesearchupload_imageUploadImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "image_name": {"type": "string", "minLength": 1, "description": "The image name need to have the extension of the file."}}, "required": ["image_name", "providers"]}, "llmchatChatDataClass": {"properties": {"id": {"description": "Unique identifier for this completion", "title": "Id", "type": "string"}, "object": {"description": "Object type, always 'chat.completion'", "title": "Object", "type": "string"}, "created": {"description": "Unix timestamp for when the completion was created", "title": "Created", "type": "integer"}, "model": {"description": "The model used for completion", "title": "Model", "type": "string"}, "choices": {"description": "List of chat completion choices generated by the model", "items": {"$ref": "#/components/schemas/ChatCompletionChoice"}, "title": "Choices", "type": "array"}, "usage": {"allOf": [{"$ref": "#/components/schemas/ChatCompletionUsage"}], "description": "Usage statistics for the completion request"}, "system_fingerprint": {"default": null, "description": "Identifier for the system version that processed the request", "title": "System Fingerprint", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["id", "object", "created", "model", "choices", "usage", "status"], "title": "llmchatChatDataClass", "type": "object"}, "llmchatResponseModel": {"properties": {"dashscope": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": 
"#/components/schemas/llmchatChatDataClass"}], "default": null}, "together_ai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "deepinfra": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "iointelligence": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "cloudflare": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "perplexityai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "replicate": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "meta": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "groq": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "bytedance": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "deepseek": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "fireworks_ai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "databricks": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "nebius": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "ovhcloud": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], 
"default": null}, "huggingface": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "minimax": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "cerebras": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}}, "title": "llmchatResponseModel", "type": "object"}, "llmchatllmchatChatRequest": {"type": "object", "properties": {"messages": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "A list containing all the conversations between the user and the assistant.\nEach item in the list should be a dictionary with two keys: 'role' and 'message'.\n\n**role**: Specifies the role of the speaker and can have the values 'user', 'system', 'assistant' or 'tool'.\nThe system role instructs the way the model should answer, e.g. 'You are a helpful assistant'. The user\nrole specifies the user query and assistant is the model's response. The tool role is for external tools that\ncan be used in the conversation.\n\n**message**: A list of dictionaries. 
Each dictionary in the 'message' list must contain the keys 'type' and 'content'.\n\n#### Structure\n- **type**: Specifies the type of content and can be 'image_url' or 'text'.\n- **content**: A dictionary with the actual content based on the 'type':\n - If 'type' is 'image_url', 'content' must contain 'image_url' and must not contain 'text'.\n - If 'type' is 'text', 'content' must contain 'text' and must not contain 'image_url'.\n\n#### Example\n```json\n[\n {\n \"role\": \"user\",\n \"content\": [\n {\n \"type\": \"text\",\n \"text\": \"Describe this image\"\n },\n {\n \"type\": \"image_url\",\n \"image_url\": {\n \"url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg\"\n }\n }\n ]\n }\n]\n```"}, "model": {"type": "string", "minLength": 1, "description": "The OpenAI model to use for the chat completion. \nThis field is required and specifies which language model will process the conversation. \n\n**Example values**: 'gpt-3.5-turbo', 'gpt-4', 'gpt-4-turbo'"}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. \n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "metadata": {"type": "array", "items": {"$ref": "#/components/schemas/MetadataRequest"}, "description": "Optional list of metadata associated with the chat request. \nCan be used to provide additional context or tracking information. 
\n\n**Example**:\n```json\n{\n \"metadata\": [\n {\"key\": \"conversation_id\", \"value\": \"chat_12345\"},\n {\"key\": \"source\", \"value\": \"customer_support\"}\n ]\n}\n```"}, "frequency_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Controls repetitiveness of model responses by penalizing frequent tokens. \nRanges from -2.0 to 2.0. \n\n**Values**:\n- Positive values: Reduce token repetition\n- Negative values: Encourage repetition\n- 0.0: Default behavior\n\n**Example**: 1.5 to significantly reduce repeated phrases"}, "logit_bias": {"type": "object", "additionalProperties": {"type": "number", "format": "double"}, "description": "Modify the likelihood of specific tokens appearing in the response. \nA dictionary where keys are token IDs and values are bias scores. \n\n**Example**:\n```json\n{\n \"logit_bias\": {\n \"50256\": -100, # Reduce probability of end-of-text token\n \"15\": 5 # Slightly increase probability of a specific token\n }\n}\n```"}, "logprobs": {"type": "boolean", "description": "If set to True, returns log probabilities of the most likely tokens. \nUseful for advanced token probability analysis. \n\n**Example**: True to get detailed token likelihood information"}, "top_logprobs": {"type": "integer", "maximum": 20, "minimum": 0, "description": "Number of top log probabilities to return with each token. \nMust be between 0 and 20. \n\n**Example**: 5 to get top 5 most likely tokens for each position"}, "max_completion_tokens": {"type": "integer", "minimum": 1, "description": "Maximum number of tokens to generate in the completion. \nMust be at least 1. 
\n\n**Example**: 150 to limit response to approximately 100-150 words"}, "n": {"type": "integer", "minimum": 1, "description": "Number of chat completion choices to generate.\n\n **Example**: 3 to generate multiple alternative responses"}, "modalities": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of supported input/output modalities for the chat. \n\n**Example**:\n```json\n{\n \"modalities\": [\"text\", \"image\", \"audio\"]\n}\n```"}, "prediction": {"type": "object", "additionalProperties": {}, "description": "Optional field for storing prediction-related information. \nFlexible dictionary to capture model's predictive metadata. \n\n**Example**:\n```json\n{\n \"prediction\": {\n \"confidence_score\": 0.85,\n \"top_prediction\": \"response_category\"\n }\n}\n```"}, "audio": {"type": "object", "additionalProperties": {}, "description": "Optional dictionary for audio-related parameters or metadata. \n\n**Example**:\n```json\n{\n \"audio\": {\n \"language\": \"en-US\",\n \"transcription_format\": \"srt\"\n }\n}\n```"}, "presence_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Adjusts likelihood of discussing new topics by penalizing existing tokens. \nRanges from -2.0 to 2.0. \n\n**Values**:\n- Positive values: Encourage more diverse topics\n- Negative values: Keep discussion more focused\n- 0.0: Default behavior\n\n**Example**: 1.0 to promote topic diversity"}, "response_format": {"type": "object", "additionalProperties": {}, "description": "Specify the desired response format for the completion. \n\n**Example**:\n```json\n{\n \"response_format\": {\n \"type\": \"json_object\",\n \"schema\": {...}\n }\n}\n```"}, "seed": {"type": "integer", "description": "Set a seed for deterministic sampling to reproduce consistent results. 
\n\n**Example**: 42 for a reproducible random generation process"}, "service_tier": {"allOf": [{"$ref": "#/components/schemas/ServiceTierEnum"}], "description": "Select the service tier for the API request. \n\n**Choices**:\n- 'auto': Automatically select appropriate tier\n- 'default': Use default service configuration\n\n* `auto` - auto\n* `default` - default"}, "stop": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of strings that will cause the model to stop generating. \n\n**Example**:\n```json\n{\n \"stop\": [\"\\n\", \"Human:\", \"AI:\"]\n}\n```"}, "stream": {"type": "boolean", "default": false, "description": "If True, returns tokens as they are generated in a streaming format. \nDefault is False. \n\n**Example**: True for real-time token streaming"}, "stream_options": {"type": "object", "additionalProperties": {}, "description": "Additional configuration for streaming responses. \n\n**Example**:\n```json\n{\n \"stream_options\": {\n \"include_usage\": true\n }\n}\n```"}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "description": "Controls randomness in token selection. \nRanges from 0.0 to 2.0. \n\n**Values**:\n- 0.0: Most deterministic, focused responses\n- 1.0: Balanced randomness\n- 2.0: Most creative, unpredictable responses\n\n**Example**: 0.7 for a good balance of creativity and focus"}, "top_p": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "description": "Nucleus sampling threshold for token selection. \nRanges from 0.0 to 1.0. Default is 1.0. \n\n**Values**:\n- 1.0: Consider all tokens\n- Lower values: More focused, deterministic sampling\n\n**Example**: 0.9 to select from top 90% most probable tokens"}, "tools": {"type": "array", "items": {}, "description": "List of tools or function definitions available to the model. 
\n\n**Example**:\n```json\n{\n \"tools\": [\n {\n \"type\": \"function\",\n \"function\": {\n \"name\": \"get_weather\",\n \"description\": \"Retrieve current weather\"\n }\n }\n ]\n}\n```"}, "tool_choice": {"type": "string", "minLength": 1, "description": "Specify how tools should be used in the completion. \n\n**Example values**:\n- 'auto': Model decides when to use tools\n- 'none': Disable tool usage\n- Specific tool name to always use a particular tool"}, "parallel_tool_calls": {"type": "boolean", "description": "Allow the model to make multiple tool calls in parallel. \n\n**Example**: True to enable concurrent tool invocations"}, "user": {"type": "string", "minLength": 1, "description": "Optional identifier for the end-user to help track and monitor API usage. \n\n**Example**: 'user_123456'"}, "function_call": {"type": "string", "minLength": 1, "description": "Control how function calls are handled. \n\n**Example values**:\n- 'auto': Default behavior\n- 'none': Disable function calls\n- Specific function name to force its execution"}, "functions": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "List of function definitions available to the model. \n\n**Example**:\n```json\n{\n \"functions\": [\n {\n \"name\": \"get_current_weather\",\n \"description\": \"Get the current weather for a location\",\n \"parameters\": {...}\n }\n ]\n}\n```"}, "thinking": {"allOf": [{"$ref": "#/components/schemas/ThinkingRequest"}], "description": "Configuration for enabling Claude's extended thinking. When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer. Requires a minimum budget of 1,024 tokens and counts towards your max_tokens limit.\n\n**Example**:\n```json\n{\n 'thinking': {\n 'type': 'enabled'\n 'budget_tokens': '1024' }\n}\n```"}, "web_search_options": {"type": "object", "additionalProperties": {}, "description": "Options for web search integration. 
\n **Example**:\n ```json\n web_search_options={\n \"search_context_size\": \"medium\" # Options: \"low\", \"medium\", \"high\"\n }\n ```"}}, "required": ["messages", "model"]}, "multimodalchatChatDataClass": {"properties": {"generated_text": {"title": "Generated Text", "type": "string"}, "messages": {"items": {"$ref": "#/components/schemas/ChatMessageDataClass"}, "title": "Messages", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["generated_text", "status"], "title": "multimodalchatChatDataClass", "type": "object"}, "multimodalchatChatRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{\"items\": [{\"attribute_1\": \"x1\",\"attribute_2\": \"y1\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "messages": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "A list containing all the conversations between the user and the assistant. Each item in the list should be a dictionary with two keys: 'role' and 'message'.\n\n**role**: Specifies the role of the speaker and can have the values 'user' or 'assistant'.\n\n**message**: A list of dictionaries. Each dictionary in the 'message' list must contain the keys 'type' and 'content'.\n\n#### Structure\n- **type**: Specifies the type of content and can be 'media_url', 'media_base64', or 'text'.\n- **content**: A dictionary with the actual content based on the 'type':\n - If 'type' is 'media_url', 'content' must contain 'media_url' and must not contain 'media_base64' or 'text'.\n - If 'type' is 'media_base64', 'content' must contain 'media_base64' and must not contain 'media_url' or 'text'.\n - If 'type' is 'text', 'content' must contain 'text' and must not contain 'media_url' or 'media_base64'.\n\n#### Example\n```json\n[\n {\n 'role': 'user',\n 'content': [\n {\n 'type': 'text',\n 'content': {'text': 'Describe this image'}\n },\n {\n 'type': 'media_url',\n 'content': {\n 'media_url': 'https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg',\n 'media_type': 'image/jpeg'}\n },\n ]\n }\n]\n```"}, "chatbot_global_action": {"type": "string", "nullable": true, "description": "A system message that helps set the behavior of the assistant. For example, 'You are a helpful assistant'."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Controls the creativity of the model's responses. Higher values (up to 2) make the output more random, while lower values make it more focused and deterministic. 
A value of 0 (argmax sampling) is useful for scenarios requiring precise answers."}, "max_tokens": {"type": "integer", "maximum": 300000, "minimum": 1, "default": 2048, "description": "The maximum number of tokens to generate in the completion. This value, combined with the token count of your prompt, cannot exceed the model's context length."}, "stop_sequences": {"type": "array", "items": {"type": "string"}, "default": [], "description": "A list of sequences where the model will stop generating further tokens. Useful for controlling response length and format."}, "top_k": {"type": "integer", "maximum": 500, "minimum": 0, "description": "Limits the sampling pool to the top K options for each token. Setting this to a lower value can make the output more focused and deterministic."}, "top_p": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "description": "Enables nucleus sampling, where the model considers the smallest number of tokens whose cumulative probability is at least top_p. This allows for a dynamic selection of tokens based on probability, offering a balance between focus and creativity."}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. 
\n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}}, "required": ["messages", "providers"]}, "multimodalchatResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}}, "title": "multimodalchatResponseModel", "type": "object"}, "ocranonymization_asyncAnonymizationAsyncDataClass": {"properties": {"document": {"title": "Document", "type": "string"}, "document_url": {"title": "Document Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["document", "document_url", "id", "final_status"], "title": "ocranonymization_asyncAnonymizationAsyncDataClass", "type": "object"}, "ocranonymization_asyncModel": {"properties": {"readyredact": {"allOf": [{"$ref": 
"#/components/schemas/ocranonymization_asyncAnonymizationAsyncDataClass"}], "default": null}, "privateai": {"allOf": [{"$ref": "#/components/schemas/ocranonymization_asyncAnonymizationAsyncDataClass"}], "default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/ocranonymization_asyncAnonymizationAsyncDataClass"}], "default": null}}, "title": "ocranonymization_asyncModel", "type": "object"}, "ocrbank_check_parsingBankCheckParsingDataClass": {"properties": {"extracted_data": {"items": {"$ref": "#/components/schemas/ItemBankCheckParsingDataClass"}, "title": "Extracted Data", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocrbank_check_parsingBankCheckParsingDataClass", "type": "object"}, "ocrbank_check_parsingBankCheckParsingRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{\"items\": [{\"attribute_1\": \"x1\",\"attribute_2\": \"y1\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocrbank_check_parsingResponseModel": {"properties": {"veryfi": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}, "mindee": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}}, "title": "ocrbank_check_parsingResponseModel", "type": "object"}, "ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass", "type": "object"}, 
"ocrcustom_document_parsing_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass"}], "default": null}}, "title": "ocrcustom_document_parsing_asyncModel", "type": "object"}, "ocrdata_extractionDataExtractionDataClass": {"properties": {"fields": {"items": {"$ref": "#/components/schemas/ItemDataExtraction"}, "title": "Fields", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocrdata_extractionDataExtractionDataClass", "type": "object"}, "ocrdata_extractionDataExtractionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{\"items\": [{\"attribute_1\": \"x1\",\"attribute_2\": \"y1\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocrdata_extractionResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrdata_extractionDataExtractionDataClass"}], "default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/ocrdata_extractionDataExtractionDataClass"}], "default": null}}, "title": "ocrdata_extractionResponseModel", "type": "object"}, "ocrfinancial_parserFinancialParserDataClass": {"properties": {"extracted_data": {"description": "List of parsed financial data objects (per page).", "items": {"$ref": "#/components/schemas/FinancialParserObjectDataClass"}, "title": "Extracted Data", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocrfinancial_parserFinancialParserDataClass", "type": "object"}, "ocrfinancial_parserFinancialParserRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{\"items\": [{\"attribute_1\": \"x1\",\"attribute_2\": \"y1\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "language": {"type": "string", "nullable": true, "description": "Language code of the language the document is written in (ex: fr (French), en (English), es (Spanish))"}, "document_type": {"allOf": [{"$ref": "#/components/schemas/DocumentTypeEnum"}], "default": "invoice", "description": "Specify the type of your document. Can be Set to 'auto-detect' for automatic detection if the provider supports it. Otherwise, the default is 'invoice'.\n\n* `auto-detect` - auto-detect\n* `invoice` - invoice\n* `receipt` - receipt"}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocrfinancial_parserResponseModel": {"properties": {"veryfi": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "eagledoc": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "mindee": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "tabscanner": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "affinda": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], 
"default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "klippa": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "dataleon": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}}, "title": "ocrfinancial_parserResponseModel", "type": "object"}, "ocridentity_parserIdentityParserDataClass": {"properties": {"extracted_data": {"items": {"$ref": "#/components/schemas/InfosIdentityParserDataClass"}, "title": "Extracted Data", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocridentity_parserIdentityParserDataClass", "type": "object"}, "ocridentity_parserIdentityParserRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{\"items\": [{\"attribute_1\": \"x1\",\"attribute_2\": \"y1\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocridentity_parserResponseModel": {"properties": {"mindee": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "affinda": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "klippa": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}}, "title": "ocridentity_parserResponseModel", "type": "object"}, "ocrocrOcrDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "bounding_boxes": {"items": {"$ref": "#/components/schemas/Bounding_box"}, "title": "Bounding Boxes", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": 
"#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "ocrocrOcrDataClass", "type": "object"}, "ocrocrOcrRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "language": {"type": "string", "nullable": true, "description": "Language code of the language the document is written in (ex: fr (French), en (English), es (Spanish))"}}, "required": ["providers"]}, "ocrocrResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "base64": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}}, "title": "ocrocrResponseModel", "type": "object"}, "ocrocr_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}}, "title": "ocrocr_asyncModel", 
"type": "object"}, "ocrocr_asyncOcrAsyncDataClass": {"properties": {"raw_text": {"title": "Raw Text", "type": "string"}, "pages": {"description": "List of pages", "items": {"$ref": "#/components/schemas/Page"}, "title": "Pages", "type": "array"}, "number_of_pages": {"description": "Number of pages in the document", "title": "Number Of Pages", "type": "integer"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["raw_text", "number_of_pages", "id", "final_status"], "title": "ocrocr_asyncOcrAsyncDataClass", "type": "object"}, "ocrocr_tables_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrocr_tables_asyncOcrTablesAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrocr_tables_asyncOcrTablesAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrocr_tables_asyncOcrTablesAsyncDataClass"}], "default": null}}, "title": "ocrocr_tables_asyncModel", "type": "object"}, "ocrocr_tables_asyncOcrTablesAsyncDataClass": {"properties": {"pages": {"items": {"$ref": "#/components/schemas/Page"}, "title": "Pages", "type": "array"}, "num_pages": {"title": "Num Pages", "type": "integer"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, 
"title": "Error", "type": "object"}}, "required": ["num_pages", "id", "final_status"], "title": "ocrocr_tables_asyncOcrTablesAsyncDataClass", "type": "object"}, "ocrresume_parserResponseModel": {"properties": {"hireability": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "affinda": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "senseloaf": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "klippa": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}}, "title": "ocrresume_parserResponseModel", "type": "object"}, "ocrresume_parserResumeParserDataClass": {"properties": {"extracted_data": {"$ref": "#/components/schemas/ResumeExtractedData"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["extracted_data", "status"], "title": "ocrresume_parserResumeParserDataClass", "type": "object"}, "ocrresume_parserResumeParserRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "textai_detectionAiDetectionDataClass": {"properties": {"ai_score": {"title": "Ai Score", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/AiDetectionItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["ai_score", "status"], "title": "textai_detectionAiDetectionDataClass", "type": "object"}, "textai_detectionAiDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "provider_params": {"type": "string", "description": "\nParameters specific to the provider that you want to send along the request.\n\nit should take a *provider* name as key and an object of parameters as value.\n\nExample:\n\n {\n \"deepgram\": {\n \"filler_words\": true,\n \"smart_format\": true,\n \"callback\": \"https://webhook.site/0000\"\n },\n \"assembly\": {\n \"webhook_url\": \"https://webhook.site/0000\"\n }\n }\n\nPlease refer to the documentation of each provider to see which parameters to send.\n"}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}}, "required": ["providers", "text"]}, "textai_detectionResponseModel": {"properties": {"winstonai": {"allOf": [{"$ref": "#/components/schemas/textai_detectionAiDetectionDataClass"}], "default": null}, "sapling": {"allOf": [{"$ref": "#/components/schemas/textai_detectionAiDetectionDataClass"}], "default": null}, "originalityai": {"allOf": [{"$ref": "#/components/schemas/textai_detectionAiDetectionDataClass"}], "default": null}}, "title": "textai_detectionResponseModel", "type": "object"}, "textanonymizationAnonymizationDataClass": {"properties": {"result": {"title": "Result", "type": "string"}, "entities": {"items": {"$ref": "#/components/schemas/AnonymizationEntity"}, "title": "Entities", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["result", "status"], "title": "textanonymizationAnonymizationDataClass", "type": "object"}, "textanonymizationResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, 
"amazon": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "emvista": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "privateai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}}, "title": "textanonymizationResponseModel", "type": "object"}, "textchatChatDataClass": {"properties": {"generated_text": {"title": "Generated Text", "type": "string"}, "message": {"items": {"$ref": "#/components/schemas/ChatMessageDataClass"}, "title": "Message", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["generated_text", "status"], "title": "textchatChatDataClass", "type": "object"}, "textchatChatRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "nullable": true, "default": "", "description": "Start your conversation here..."}, "chatbot_global_action": {"type": "string", "nullable": true, "default": "", "description": "A system message that helps set the behavior of the assistant. For example, 'You are a helpful assistant'."}, "previous_history": {"type": "array", "items": {"$ref": "#/components/schemas/ChatMessageRequest"}, "description": "A list containing all the previous conversations between the user and the chatbot AI. Each item in the list should be a dictionary with two keys: 'role' and 'message'. The 'role' key specifies the role of the speaker and can have the values 'user' or 'assistant'. The 'message' key contains the text of the conversation from the respective role. For example: [{'role': 'user', 'message': 'Hello'}, {'role': 'assistant', 'message': 'Hi, how can I help you?'}, ...]. This format allows easy identification of the speaker's role and their corresponding message."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 4096, "description": "The maximum number of tokens to generate in the completion. 
The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "tool_choice": {"allOf": [{"$ref": "#/components/schemas/ToolChoiceEnum"}], "default": "auto", "description": "`auto`: the model will choose to use tools if needed, `required`: force model to use any of the available tools, `none`: force model to not select a tool\n\n* `auto` - auto\n* `required` - required\n* `none` - none"}, "available_tools": {"type": "array", "items": {"$ref": "#/components/schemas/ChatAvailableToolsRequest"}, "description": "A list of tools the model may generate the right arguments for."}, "tool_results": {"type": "array", "items": {"$ref": "#/components/schemas/ChatToolResultRequest"}, "description": "List of results obtained from applying the tool_call arguments to your own tool."}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. \n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}}, "required": ["providers"]}, "textchatChatStreamRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "nullable": true, "default": "", "description": "Start your conversation here..."}, "chatbot_global_action": {"type": "string", "nullable": true, "default": "", "description": "A system message that helps set the behavior of the assistant. For example, 'You are a helpful assistant'."}, "previous_history": {"type": "array", "items": {"$ref": "#/components/schemas/ChatMessageRequest"}, "description": "A list containing all the previous conversations between the user and the chatbot AI. Each item in the list should be a dictionary with two keys: 'role' and 'message'. The 'role' key specifies the role of the speaker and can have the values 'user' or 'assistant'. The 'message' key contains the text of the conversation from the respective role. For example: [{'role': 'user', 'message': 'Hello'}, {'role': 'assistant', 'message': 'Hi, how can I help you?'}, ...]. This format allows easy identification of the speaker's role and their corresponding message."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 4096, "description": "The maximum number of tokens to generate in the completion. 
The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "tool_choice": {"allOf": [{"$ref": "#/components/schemas/ToolChoiceEnum"}], "default": "auto", "description": "`auto`: the model will choose to use tools if needed, `required`: force model to use any of the available tools, `none`: force model to not select a tool\n\n* `auto` - auto\n* `required` - required\n* `none` - none"}, "available_tools": {"type": "array", "items": {"$ref": "#/components/schemas/ChatAvailableToolsRequest"}, "description": "A list of tools the model may generate the right arguments for."}, "tool_results": {"type": "array", "items": {"$ref": "#/components/schemas/ChatToolResultRequest"}, "description": "List of results obtained from applying the tool_call arguments to your own tool."}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. 
\n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "fallback_type": {"allOf": [{"$ref": "#/components/schemas/FallbackTypeEnum"}], "default": "continue"}}, "required": ["providers"]}, "textchatResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "together_ai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "meta": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "groq": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "deepseek": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "replicate": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "perplexityai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}}, "title": "textchatResponseModel", "type": "object"}, "textcode_generationCodeGenerationDataClass": {"properties": {"generated_text": {"title": "Generated Text", "type": "string"}, "original_response": 
{"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["generated_text", "status"], "title": "textcode_generationCodeGenerationDataClass", "type": "object"}, "textcode_generationCodeGenerationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "prompt": {"type": "string", "nullable": true, "default": "", "description": "Enter the source code that will be used as a context."}, "instruction": {"type": "string", "minLength": 1, "description": "Enter the instruction you want to be followed."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 1000, "description": "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length."}}, "required": ["instruction", "providers"]}, "textcode_generationResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/textcode_generationCodeGenerationDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textcode_generationCodeGenerationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textcode_generationCodeGenerationDataClass"}], "default": null}}, "title": "textcode_generationResponseModel", "type": "object"}, "textembeddingsEmbeddingsDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/EmbeddingDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "textembeddingsEmbeddingsDataClass", "type": "object"}, "textembeddingsEmbeddingsRequest": {"type": "object", "properties": {"settings": 
{"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "texts": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of texts to transform into embeddings."}, "dimensions": {"type": "integer", "minimum": 1, "nullable": true, "description": "Parameter to control the size of the output embedding vector."}}, "required": ["providers", "texts"]}, "textembeddingsResponseModel": {"properties": {"ai21labs": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "iointelligence": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "jina": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}}, "title": "textembeddingsResponseModel", "type": "object"}, "textemotion_detectionEmotionDetectionDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/EmotionItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "textemotion_detectionEmotionDetectionDataClass", "type": "object"}, "textemotion_detectionEmotionDetectionRequest": {"type": "object", 
"properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}}, "required": ["providers", "text"]}, "textemotion_detectionResponseModel": {"properties": {"vernai": {"allOf": [{"$ref": "#/components/schemas/textemotion_detectionEmotionDetectionDataClass"}], "default": null}}, "title": "textemotion_detectionResponseModel", "type": "object"}, "textentity_sentimentEntitySentimentDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/Entity"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["items", "status"], "title": "textentity_sentimentEntitySentimentDataClass", "type": "object"}, "textentity_sentimentEntitySentimentRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}}, "required": ["providers", "text"]}, "textentity_sentimentResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/textentity_sentimentEntitySentimentDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textentity_sentimentEntitySentimentDataClass"}], "default": null}}, "title": "textentity_sentimentResponseModel", "type": "object"}, "textkeyword_extractionKeywordExtractionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/InfosKeywordExtractionDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "textkeyword_extractionKeywordExtractionDataClass", "type": "object"}, "textkeyword_extractionResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "emvista": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, 
"corticalio": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}}, "title": "textkeyword_extractionResponseModel", "type": "object"}, "textmoderationModerationDataClass": {"properties": {"nsfw_likelihood": {"title": "Nsfw Likelihood", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/TextModerationItem"}, "title": "Items", "type": "array"}, "nsfw_likelihood_score": {"title": "Nsfw Likelihood Score", "type": "integer"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["nsfw_likelihood", "nsfw_likelihood_score", "status"], "title": "textmoderationModerationDataClass", "type": "object"}, "textmoderationResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/textmoderationModerationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textmoderationModerationDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textmoderationModerationDataClass"}], "default": null}}, "title": "textmoderationResponseModel", "type": "object"}, "textnamed_entity_recognitionNamedEntityRecognitionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/InfosNamedEntityRecognitionDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the 
`show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "textnamed_entity_recognitionNamedEntityRecognitionDataClass", "type": "object"}, "textnamed_entity_recognitionResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}}, "title": "textnamed_entity_recognitionResponseModel", "type": "object"}, "textplagia_detectionPlagiaDetectionDataClass": {"properties": {"plagia_score": {"title": "Plagia Score", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/PlagiaDetectionItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["plagia_score", "status"], "title": 
"textplagia_detectionPlagiaDetectionDataClass", "type": "object"}, "textplagia_detectionPlagiaDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "A text content on which a plagiarism detection analysis will be run"}, "title": {"type": "string", "nullable": true, "default": "", "description": "Content title"}}, "required": ["providers", "text"]}, "textplagia_detectionResponseModel": {"properties": {"winstonai": {"allOf": [{"$ref": "#/components/schemas/textplagia_detectionPlagiaDetectionDataClass"}], "default": null}, "originalityai": {"allOf": [{"$ref": "#/components/schemas/textplagia_detectionPlagiaDetectionDataClass"}], "default": null}}, "title": "textplagia_detectionResponseModel", "type": "object"}, "textprompt_optimizationPromptOptimizationDataClass": {"properties": {"missing_information": {"title": "Missing Information", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/PromptDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["missing_information", "status"], "title": "textprompt_optimizationPromptOptimizationDataClass", "type": "object"}, "textprompt_optimizationPromptOptimizationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Description of the desired prompt."}, "target_provider": {"type": "string", "minLength": 1, "description": "Select the provider for the prompt optimization"}}, "required": ["providers", "target_provider", "text"]}, "textprompt_optimizationResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/textprompt_optimizationPromptOptimizationDataClass"}], "default": null}}, "title": "textprompt_optimizationResponseModel", "type": "object"}, "textsentiment_analysisResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "emvista": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "sapling": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "ibm": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}}, "title": "textsentiment_analysisResponseModel", "type": "object"}, 
"textsentiment_analysisSentimentAnalysisDataClass": {"properties": {"general_sentiment": {"allOf": [{"$ref": "#/components/schemas/GeneralSentimentEnum"}], "title": "General Sentiment"}, "general_sentiment_rate": {"maximum": 1.0, "minimum": 0.0, "title": "General Sentiment Rate", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/SegmentSentimentAnalysisDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["general_sentiment", "general_sentiment_rate", "status"], "title": "textsentiment_analysisSentimentAnalysisDataClass", "type": "object"}, "textspell_checkResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "prowritingaid": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "sapling": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}}, "title": "textspell_checkResponseModel", "type": "object"}, "textspell_checkSpellCheckDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/SpellCheckItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing 
the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "textspell_checkSpellCheckDataClass", "type": "object"}, "textspell_checkSpellCheckRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}}, "required": ["providers", "text"]}, "textsummarizeResponseModel": {"properties": {"oneai": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "emvista": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "writesonic": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "meaningcloud": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "alephalpha": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}}, "title": "textsummarizeResponseModel", "type": "object"}, "textsummarizeSummarizeDataClass": {"properties": {"result": {"title": "Result", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["result", "status"], "title": 
"textsummarizeSummarizeDataClass", "type": "object"}, "textsummarizeSummarizeRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}, "output_sentences": {"type": "integer", "minimum": 1, "default": 1}}, "required": ["providers", "text"]}, "texttopic_extractionResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}}, "title": "texttopic_extractionResponseModel", "type": "object"}, "texttopic_extractionTopicExtractionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/ExtractedTopic"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "texttopic_extractionTopicExtractionDataClass", "type": "object"}, "texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}}, "required": ["providers", "text"]}, "translationautomatic_translationAutomaticTranslationDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "translationautomatic_translationAutomaticTranslationDataClass", "type": "object"}, "translationautomatic_translationAutomaticTranslationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "source_language": {"type": "string", "nullable": true, "description": "Source language code (ex: en, fr)"}, "target_language": {"type": "string", "minLength": 1, "description": "Target language code (ex: en, fr)"}}, "required": ["providers", "target_language", "text"]}, "translationautomatic_translationResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "modernmt": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "deepl": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}}, "title": "translationautomatic_translationResponseModel", "type": "object"}, "translationdocument_translationDocumentTranslationDataClass": {"properties": {"file": {"title": "File", "type": "string"}, "document_resource_url": {"title": "Document Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": 
[{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["file", "document_resource_url", "status"], "title": "translationdocument_translationDocumentTranslationDataClass", "type": "object"}, "translationdocument_translationDocumentTranslationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "source_language": {"type": "string", "nullable": true, "description": "Source language code (ex: en, fr)"}, "target_language": {"type": "string", "minLength": 1, "description": "Target language code (ex: en, fr)"}}, "required": ["providers", "target_language"]}, "translationdocument_translationResponseModel": {"properties": {"deepl": {"allOf": [{"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationDataClass"}], "default": null}}, "title": "translationdocument_translationResponseModel", "type": "object"}, "translationlanguage_detectionLanguageDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/InfosLanguageDetectionDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "translationlanguage_detectionLanguageDetectionDataClass", "type": "object"}, "translationlanguage_detectionLanguageDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}}, "required": ["providers", "text"]}, "translationlanguage_detectionResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "modernmt": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}}, "title": "translationlanguage_detectionResponseModel", "type": "object"}, "videodeepfake_detection_asyncDeepfakeDetectionAsyncDataClass": {"properties": {"average_score": {"maximum": 1.0, "minimum": 0.0, "title": "Average Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/PredictionB20Enum"}], "title": "Prediction"}, "details_per_frame": {"items": {"$ref": "#/components/schemas/DetailPerFrame"}, "title": "Details Per Frame", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": 
"#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["average_score", "prediction", "id", "final_status"], "title": "videodeepfake_detection_asyncDeepfakeDetectionAsyncDataClass", "type": "object"}, "videodeepfake_detection_asyncModel": {"properties": {"sightengine": {"allOf": [{"$ref": "#/components/schemas/videodeepfake_detection_asyncDeepfakeDetectionAsyncDataClass"}], "default": null}}, "title": "videodeepfake_detection_asyncModel", "type": "object"}, "videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass": {"properties": {"moderation": {"items": {"$ref": "#/components/schemas/ContentNSFW"}, "title": "Moderation", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass", "type": "object"}, "videoexplicit_content_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass"}], "default": null}}, "title": "videoexplicit_content_detection_asyncModel", "type": "object"}, "videoface_detection_asyncFaceDetectionAsyncDataClass": {"properties": {"faces": {"items": {"$ref": "#/components/schemas/VideoFace"}, "title": "Faces", "type": "array"}, "original_response": {"default": null, 
"description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoface_detection_asyncFaceDetectionAsyncDataClass", "type": "object"}, "videoface_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoface_detection_asyncFaceDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoface_detection_asyncFaceDetectionAsyncDataClass"}], "default": null}}, "title": "videoface_detection_asyncModel", "type": "object"}, "videogeneration_asyncGenerationAsyncDataClass": {"properties": {"video": {"title": "Video", "type": "string"}, "video_resource_url": {"title": "Video Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["video", "video_resource_url", "id", "final_status"], "title": "videogeneration_asyncGenerationAsyncDataClass", "type": "object"}, "videogeneration_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "minimax": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "bytedance": {"allOf": [{"$ref": 
"#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}}, "title": "videogeneration_asyncModel", "type": "object"}, "videolabel_detection_asyncLabelDetectionAsyncDataClass": {"properties": {"labels": {"items": {"$ref": "#/components/schemas/VideoLabel"}, "title": "Labels", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videolabel_detection_asyncLabelDetectionAsyncDataClass", "type": "object"}, "videolabel_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videolabel_detection_asyncLabelDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videolabel_detection_asyncLabelDetectionAsyncDataClass"}], "default": null}}, "title": "videolabel_detection_asyncModel", "type": "object"}, "videologo_detection_asyncLogoDetectionAsyncDataClass": {"properties": {"logos": {"items": {"$ref": "#/components/schemas/LogoTrack"}, "title": "Logos", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": 
"Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videologo_detection_asyncLogoDetectionAsyncDataClass", "type": "object"}, "videologo_detection_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videologo_detection_asyncLogoDetectionAsyncDataClass"}], "default": null}, "twelvelabs": {"allOf": [{"$ref": "#/components/schemas/videologo_detection_asyncLogoDetectionAsyncDataClass"}], "default": null}}, "title": "videologo_detection_asyncModel", "type": "object"}, "videoobject_tracking_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videoobject_tracking_asyncObjectTrackingAsyncDataClass"}], "default": null}}, "title": "videoobject_tracking_asyncModel", "type": "object"}, "videoobject_tracking_asyncObjectTrackingAsyncDataClass": {"properties": {"objects": {"items": {"$ref": "#/components/schemas/ObjectTrack"}, "title": "Objects", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoobject_tracking_asyncObjectTrackingAsyncDataClass", "type": "object"}, "videoperson_tracking_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoperson_tracking_asyncPersonTrackingAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoperson_tracking_asyncPersonTrackingAsyncDataClass"}], 
"default": null}}, "title": "videoperson_tracking_asyncModel", "type": "object"}, "videoperson_tracking_asyncPersonTrackingAsyncDataClass": {"properties": {"persons": {"items": {"$ref": "#/components/schemas/VideoTrackingPerson"}, "title": "Persons", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoperson_tracking_asyncPersonTrackingAsyncDataClass", "type": "object"}, "videoquestion_answerQuestionAnswerDataClass": {"properties": {"answer": {"title": "Answer", "type": "string"}, "finish_reason": {"title": "Finish Reason", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["answer", "finish_reason", "status"], "title": "videoquestion_answerQuestionAnswerDataClass", "type": "object"}, "videoquestion_answerQuestionAnswerRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mp4|mpeg|mov|avi|x\\-flx|mpg|webm|wmv|3gpp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "maximum": 3000000, "minimum": 1, "description": "The maximum number of tokens to generate in the completion. This value, combined with the token count of your prompt, cannot exceed the model's context length."}, "text": {"type": "string", "minLength": 1, "description": "Question about the video"}}, "required": ["providers", "text"]}, "videoquestion_answerResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerDataClass"}], "default": null}}, "title": "videoquestion_answerResponseModel", "type": "object"}, "videoquestion_answer_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videoquestion_answer_asyncQuestionAnswerAsyncDataClass"}], "default": null}}, "title": "videoquestion_answer_asyncModel", "type": "object"}, "videoquestion_answer_asyncQuestionAnswerAsyncDataClass": {"properties": {"answer": {"title": "Answer", "type": "string"}, "finish_reason": {"title": "Finish Reason", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": 
"#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["answer", "finish_reason", "id", "final_status"], "title": "videoquestion_answer_asyncQuestionAnswerAsyncDataClass", "type": "object"}, "videoshot_change_detection_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videoshot_change_detection_asyncShotChangeDetectionAsyncDataClass"}], "default": null}}, "title": "videoshot_change_detection_asyncModel", "type": "object"}, "videoshot_change_detection_asyncShotChangeDetectionAsyncDataClass": {"properties": {"shotAnnotations": {"items": {"$ref": "#/components/schemas/ShotFrame"}, "title": "Shotannotations", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoshot_change_detection_asyncShotChangeDetectionAsyncDataClass", "type": "object"}, "videotext_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videotext_detection_asyncTextDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videotext_detection_asyncTextDetectionAsyncDataClass"}], "default": null}, "twelvelabs": {"allOf": [{"$ref": "#/components/schemas/videotext_detection_asyncTextDetectionAsyncDataClass"}], "default": null}}, "title": "videotext_detection_asyncModel", "type": "object"}, "videotext_detection_asyncTextDetectionAsyncDataClass": {"properties": {"texts": {"items": {"$ref": "#/components/schemas/VideoText"}, "title": "Texts", "type": "array"}, 
"original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videotext_detection_asyncTextDetectionAsyncDataClass", "type": "object"}}, "securitySchemes": {"FeatureApiAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}, "WorkflowShareApiAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}, "jwtAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}}}, "servers": [{"url": "https://api.edenai.run/v2"}], "security": [{"FeatureApiAuth": []}]} \ No newline at end of file +{"openapi": "3.0.3", "info": {"title": "Eden AI", "version": "2.0", "description": "Your project description"}, "paths": {"/{feature}/{subfeature}/batch/{name}/": {"get": {"operationId": "feature_batch_retrieve", "description": "Return paginated response of requests with their status and their\nresponses if the request succeeded or error if failed", "summary": "Get Batch Job Result", "parameters": [{"in": "path", "name": "feature", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "name", "schema": {"type": "string", "minLength": 1}}, {"in": "query", "name": "page", "schema": {"type": "integer", "minimum": 0}}, {"in": "query", "name": "public_id", "schema": {"type": "integer", "minimum": 0}}, {"in": "query", "name": "status", "schema": {"enum": ["succeeded", "failed", "finished", "processing"], "type": "string", "minLength": 1}, "description": "* `succeeded` - Status Succeeded\n* `failed` - Status Failed\n* `finished` - Status Finished\n* 
`processing` - Status Processing"}, {"in": "path", "name": "subfeature", "schema": {"type": "string"}, "required": true}], "tags": ["Batch"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedBatchResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}, "post": {"operationId": "feature_batch_create", "description": "\nLaunch an async Batch job, given a job name that will be used as its id.\n\nEach request should have the same parameters as you would normally pass to a feature.\n\n\nYou can also pass an optional parameter `name` to help better identify each request you send.\n\n\nExample with `text`/`sentiment_analysis`:\n\n```json\n\"requests\": [\n {\n \"text\": \"It's -25 outside and I am so hot.\",\n \"language\": \"en\",\n \"providers\": \"google,amazon\"\n },\n {\n \"name\": \"mixed\",\n \"text\": \"Overall I am satisfied with my experience at Amazon, but two areas of major improvement needed.\",\n \"language\": \"en\",\n \"providers\": \"google\"\n },\n ...\n]\n```\n\n\n### Limitations:\nYou can have up to `5` concurrent running Jobs & input up to `500` requests per Batch\n \n\n
Available Features\n\n\n\n|Feature Name|Subfeature Name|\n|------------|---------------|\n|`translation`|`language_detection`|\n|`image`|`face_detection`|\n|`text`|`chat`|\n|`ocr`|`receipt_parser`|\n|`image`|`anonymization`|\n|`audio`|`text_to_speech`|\n|`translation`|`document_translation`|\n|`image`|`logo_detection`|\n|`image`|`landmark_detection`|\n|`image`|`background_removal`|\n|`text`|`topic_extraction`|\n|`text`|`embeddings`|\n|`text`|`custom_classification`|\n|`text`|`anonymization`|\n|`text`|`summarize`|\n|`text`|`custom_named_entity_recognition`|\n|`text`|`generation`|\n|`text`|`code_generation`|\n|`text`|`moderation`|\n|`image`|`generation`|\n|`translation`|`automatic_translation`|\n|`text`|`search`|\n|`text`|`spell_check`|\n|`image`|`object_detection`|\n|`ocr`|`identity_parser`|\n|`image`|`explicit_content`|\n|`ocr`|`invoice_parser`|\n|`ocr`|`resume_parser`|\n|`audio`|`speech_to_text_async`|\n|`ocr`|`ocr_tables_async`|\n|`video`|`text_detection_async`|\n|`text`|`question_answer`|\n|`text`|`keyword_extraction`|\n|`text`|`named_entity_recognition`|\n|`text`|`syntax_analysis`|\n|`text`|`sentiment_analysis`|\n\n
\n\n", "summary": "Launch Batch Job", "parameters": [{"in": "path", "name": "feature", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "subfeature", "schema": {"type": "string"}, "required": true}], "tags": ["Batch"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BatchRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BatchLaunchResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}, "delete": {"operationId": "feature_batch_destroy", "description": "Api view with custom pagination method to return paginated response from any queryset", "summary": "Delete Batch Job", "parameters": [{"in": "path", "name": "feature", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "subfeature", "schema": {"type": "string"}, "required": true}], "tags": ["Batch"], "security": [{"FeatureApiAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/": {"get": {"operationId": "aiproducts_aiproducts_list", "summary": "List Projects", "parameters": [{"in": "query", "name": "project_type", "schema": {"type": "string"}}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": 
{"schema": {"type": "array", "items": {"$ref": "#/components/schemas/AIProject"}}}}, "description": ""}}}}, "/aiproducts/{project_id}/": {"get": {"operationId": "aiproducts_aiproducts_retrieve", "summary": "Retrieve Project", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AIProject"}}}, "description": ""}, "404": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_create", "description": "Allows you to create a new Ask YODA project with specified details.\n\n
Default Embedding Models\n\n|Name|Value|\n|----|-----|\n|**openai**|`text-embedding-3-large`|\n|**cohere**|`embed-multilingual-v3.0`|\n|**google**|`text-multilingual-embedding-002`|\n|**mistral**|`mistral-embed`|\n|**jina**|`jina-embeddings-v3`|\n\n
", "summary": "Create Project", "tags": ["Custom Chatbot (Create operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskYourDataProjectRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaCreateProjectResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/{prompt_name}/": {"patch": {"operationId": "aiproducts_aiproducts_askyoda_v2_partial_update", "description": "Update the bot prompt for your RAG project using an existing prompt.\nThis endpoint allows you to change the current bot prompt to a different existing prompt.", "summary": "Update Bot Prompt", "parameters": [{"in": "query", "name": "file_urls", "schema": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, {"in": "query", "name": "model", "schema": {"type": "string", "minLength": 1}, "description": "The model used to call the prompt. E.g. openai/gpt-4o", "required": true}, {"in": "query", "name": "name", "schema": {"type": "string", "minLength": 1}, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). 
No spaces allowed.", "required": true}, {"in": "query", "name": "params", "schema": {}, "description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "prompt_name", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "system_prompt", "schema": {"type": "string", "minLength": 1}, "description": "Specify a system prompt for the LLM"}, {"in": "query", "name": "text", "schema": {"type": "string", "minLength": 1}, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```", "required": true}], "tags": ["Custom Chatbot (Update operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_file/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_file_create", "description": "This endpoint enables you to upload files (**CSV**, **AUDIO**, **PDF**, or **XML**) into your project.
Upon upload, the file will be processed and stored as text embeddings within the project's database.
\n\n### Supported File Types\n- **CSV**: Comma-separated values files.\n- **Audio**: Supported audio formats for transcription to text.\n- **PDF**: Portable Document Format files.\n- **XML**: Extensible Markup Language files.\n\n### OCR Processing and Speech-to-Text\nIf the uploaded file is a PDF, it will undergo Optical Character Recognition (OCR) processing using the default provider chosen during the project creation step.
\nSimilarly, audio files will be converted to text using Speech-to-Text, also utilizing the default provider configured during project setup.\n\n### Accepted File Extensions\n- **PDF**\n- **CSV**\n- **AMR**\n- **FLAC**\n- **WAV**\n- **OGG**\n- **MP3**\n- **MP4**\n- **WEBM**\n- **XML**\n\n\n", "summary": "Add File", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddFileRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/AddFileRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/AddFileRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_image/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_image_create", "description": "\nThis endpoint enables you to upload images into your project.
Upon upload, the file will be processed and stored as text embeddings within the project's database.\n", "summary": "Add Image", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/AddImageRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_text/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_text_create", "description": "Add text data in your project, which will be stored as embeddings\nwithin your chosen database provider.", "summary": "Add Texts", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddTextRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_url/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_url_create", "description": "Add a list of URLs into your projects,\nthey will be processed and stored as text embeddings within your project.", "summary": "Add Urls", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddUrlRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/add_video/": 
{"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_add_video_create", "description": "\nThis endpoint enables you to upload videos into your project.
Upon upload, the file will be processed and stored as text embeddings within the project's database.\n", "summary": "Add Video", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Upload operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AddVideoRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/AddVideoRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/ask_llm/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_ask_llm_create", "description": "Retrieve a list of search query responses and compare them to your\ninput. Provide a query, and in return, receive scores for the most relevant items from your project,\nranked by their proximity to your query.", "summary": "Ask LLM", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Query operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskLLMRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaQueryResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/conversations/": {"get": {"operationId": 
"aiproducts_aiproducts_askyoda_v2_conversations_list", "summary": "List Conversations", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/Conversation"}}}}, "description": ""}}}, "post": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_create", "summary": "Create Conversation", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Create operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/ConversationRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ConversationRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Conversation"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/conversations/{conversation_id}/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_retrieve", "summary": "Retrieve Conversation Details", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetail"}}}, "description": ""}}}, "put": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_update", "summary": "Update Conversation 
Name", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Update operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetailRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/ConversationDetailRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ConversationDetailRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetail"}}}, "description": ""}}}, "patch": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_partial_update", "summary": "Update Conversation Name", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Update operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedConversationDetailRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedConversationDetailRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedConversationDetailRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ConversationDetail"}}}, "description": ""}}}, "delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_conversations_destroy", "summary": "Delete Conversation", "parameters": [{"in": "path", "name": "conversation_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": 
"project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/create_bot_prompt/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_create_bot_prompt_create", "description": "Create a default bot prompt for your RAG project.\nThe bot prompt serves as the default system message or personality definition for your RAG-powered chatbot. This prompt helps define how your bot should interact with users and what context it should consider when processing queries.", "summary": "Create Bot Profile", "parameters": [{"in": "query", "name": "file_urls", "schema": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, {"in": "query", "name": "model", "schema": {"type": "string", "minLength": 1}, "description": "The model used to call the prompt. E.g. openai/gpt-4o", "required": true}, {"in": "query", "name": "name", "schema": {"type": "string", "minLength": 1}, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed.", "required": true}, {"in": "query", "name": "params", "schema": {}, "description": "Params that are passed on to the llm request. 
See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "query", "name": "system_prompt", "schema": {"type": "string", "minLength": 1}, "description": "Specify a system prompt for the LLM"}, {"in": "query", "name": "text", "schema": {"type": "string", "minLength": 1}, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```", "required": true}], "tags": ["Custom Chatbot (Create operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/delete_all_chunks/": {"delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_delete_all_chunks_destroy", "description": "Delete all chunks from your project.\nThis action is irreversible and will remove all data stored in your project.", "summary": "Delete All Chunks", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaDeleteResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/delete_chunk/": {"delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_delete_chunk_destroy", "description": "Delete one or multiple chunks from your project. \nYou can delete a single chunk by providing its ID either as a query parameter or in the request body. \nFor bulk deletion, provide a list of chunk IDs in the request body.\n\nExamples:\n- Single chunk deletion (query parameter): DELETE /chunks?id=chunk123\n- Single chunk deletion (body): {\"id\": \"chunk123\"}\n- Bulk deletion: {\"ids\": [\"chunk123\", \"chunk456\", \"chunk789\"]}", "summary": "Delete Chunk", "parameters": [{"in": "query", "name": "chunk_ids", "schema": {"type": "array", "items": {"type": "string", "minLength": 1, "description": "chunk_id"}}, "description": "List of chunk_ids to delete"}, {"in": "query", "name": "id", "schema": {"type": "string", "minLength": 1}, "description": "chunk_id"}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaDeleteResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/files/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_files_list", "summary": "List Files", "parameters": [{"in": "path", "name": "project_id", "schema": 
{"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/AiProductFile"}}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/files/{file_id}/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_files_retrieve", "summary": "Get File", "parameters": [{"in": "path", "name": "file_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AiProductFile"}}}, "description": ""}}}, "delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_files_destroy", "summary": "Delete File", "parameters": [{"in": "path", "name": "file_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/generate/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_generate_create", "description": "Interact with different LLM providers, this endpoint is based on the structure of the\nLLM API.", "summary": "Generate", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Query operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/GenerateRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], 
"responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaQueryResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/info/": {"get": {"operationId": "aiproducts_aiproducts_askyoda_v2_info_retrieve", "description": "Retrieve details about your project within your Ask YODA project,\nincluding the total number of items stored in your project collection and default models", "summary": "Get info", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaInfoResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/list_chunks_ids/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_list_chunks_ids_create", "description": "Retrieve a list of all chunk IDs stored in your project.", "summary": "List Chunk IDs", 
"parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Info operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListChunkRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaListChunksIdsResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/query/": {"post": {"operationId": "aiproducts_aiproducts_askyoda_v2_query_create", "description": "Interact with your data by selecting your preferred Language Model \nprovider. 
The chosen provider will then respond to queries based on the data you have added\nto your collection", "summary": "Query", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Query operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskLLMRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/YodaAskLlmResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/aiproducts/askyoda/v2/{project_id}/remove_bot_prompt/": {"delete": {"operationId": "aiproducts_aiproducts_askyoda_v2_remove_bot_prompt_destroy", "description": "Remove the bot prompt from your RAG project.\n This endpoint removes the custom bot prompt from your project, effectively resetting the bot's personality to system defaults. 
This action cannot be undone, but you can always set a new bot prompt later.", "summary": "Remove Bot Prompt", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/aiproducts/askyoda/v2/{project_id}/update_project/": {"patch": {"operationId": "aiproducts_aiproducts_askyoda_v2_update_project_partial_update", "description": "Update the default settings of the Yoda project.\nIt allows you to modify the project's default settings,\nspecifically changing the llm_provider (language model provider),\nllm_model (language model), ocr_provider (upload pdf), speech_to_text provider (upload audio)\nand the default chunks parameter associated with the default project.", "summary": "Update Project", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Update operations)"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedAskYodaProjectUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AskYodaProjectUpdate"}}}, "description": ""}}}}, "/aiproducts/delete/{project_id}/": {"delete": {"operationId": "aiproducts_aiproducts_delete_destroy", "description": "View to delete an AI project.", "summary": "Delete Project", "parameters": [{"in": "path", "name": "project_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Custom Chatbot (Delete operations)"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/audio/speech_to_text_async/": {"get": {"operationId": "audio_audio_speech_to_text_async_retrieve", "description": "Get a list of all jobs 
launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Speech to Text List Jobs", "tags": ["Speech To Text Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "audio_audio_speech_to_text_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|0.024 (per 60 seconds)|15 seconds\n|**google**|-|`v1p1beta1`|0.024 (per 60 seconds)|1 second\n|**microsoft**|-|`v1.0`|0.0168 (per 60 seconds)|1 second\n|**assembly**|-|`v2`|0.011 (per 60 seconds)|1 second\n|**deepgram**|**nova-3**|`v1`|0.0052 (per 60 seconds)|1 second\n|**deepgram**|**enhanced**|`v1`|0.0189 (per 60 seconds)|1 second\n|**deepgram**|-|`v1`|0.0189 (per 60 seconds)|1 second\n|**deepgram**|**base**|`v1`|0.0169 (per 60 seconds)|1 second\n|**openai**|-|`boto3 (v1.15.18)`|0.006 (per 60 seconds)|1 second\n|**gladia**|-|`v1`|0.0102 (per 60 seconds)|1 second\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Bashkir**|`ba`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hebrew**|`he`|\n|**Hebrew**|`iw`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Interlingua (International Auxiliary Language Association)**|`ia`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lingala**|`ln`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Mandarin Chinese**|`cmn`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Occitan (post 1500)**|`oc`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`mo`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Serbian**|`sr`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wu Chinese**|`wuu`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n|**at**|`at`|\n|**jp**|`jp`|\n|**mymr**|`mymr`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Afrikaans (South Africa)**|`af-ZA`|\n|**Albanian (Albania)**|`sq-AL`|\n|**Amharic (Ethiopia)**|`am-ET`|\n|**Arabic (Algeria)**|`ar-DZ`|\n|**Arabic (Bahrain)**|`ar-BH`|\n|**Arabic (Egypt)**|`ar-EG`|\n|**Arabic (Iraq)**|`ar-IQ`|\n|**Arabic (Israel)**|`ar-IL`|\n|**Arabic (Jordan)**|`ar-JO`|\n|**Arabic (Kuwait)**|`ar-KW`|\n|**Arabic (Lebanon)**|`ar-LB`|\n|**Arabic (Libya)**|`ar-LY`|\n|**Arabic (Mauritania)**|`ar-MR`|\n|**Arabic (Morocco)**|`ar-MA`|\n|**Arabic (Oman)**|`ar-OM`|\n|**Arabic (Palestinian Territories)**|`ar-PS`|\n|**Arabic (Qatar)**|`ar-QA`|\n|**Arabic (Saudi Arabia)**|`ar-SA`|\n|**Arabic (Syria)**|`ar-SY`|\n|**Arabic (Tunisia)**|`ar-TN`|\n|**Arabic (United Arab Emirates)**|`ar-AE`|\n|**Arabic (Yemen)**|`ar-YE`|\n|**Armenian (Armenia)**|`hy-AM`|\n|**Azerbaijani (Azerbaijan)**|`az-AZ`|\n|**Bangla (Bangladesh)**|`bn-BD`|\n|**Bangla (India)**|`bn-IN`|\n|**Basque (Spain)**|`eu-ES`|\n|**Bosnian (Bosnia & Herzegovina)**|`bs-BA`|\n|**Bulgarian (Bulgaria)**|`bg-BG`|\n|**Burmese (Myanmar (Burma))**|`my-MM`|\n|**Cantonese (China)**|`yue-CN`|\n|**Cantonese (Traditional, Hong Kong SAR China)**|`yue-Hant-HK`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Hong Kong SAR China)**|`zh-HK`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Croatian (Croatia)**|`hr-HR`|\n|**Czech (Czechia)**|`cs-CZ`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Belgium)**|`nl-BE`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (Australia)**|`en-AU`|\n|**English (Canada)**|`en-CA`|\n|**English (Ghana)**|`en-GH`|\n|**English (Hong Kong SAR China)**|`en-HK`|\n|**English (India)**|`en-IN`|\n|**English (Ireland)**|`en-IE`|\n|**English (Kenya)**|`en-KE`|\n|**English (New Zealand)**|`en-NZ`|\n|**English (Nigeria)**|`en-NG`|\n|**English (Pakistan)**|`en-PK`|\n|**English (Philippines)**|`en-PH`|\n|**English (Singapore)**|`en-SG`|\n|**English (South Africa)**|`en-ZA`|\n|**English 
(Tanzania)**|`en-TZ`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United Kingdom)**|`en-UK`|\n|**English (United States)**|`en-US`|\n|**Estonian (Estonia)**|`et-EE`|\n|**Filipino (Philippines)**|`fil-PH`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (Belgium)**|`fr-BE`|\n|**French (Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**Galician (Spain)**|`gl-ES`|\n|**Georgian (Georgia)**|`ka-GE`|\n|**German (Austria)**|`de-AT`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Greek (Greece)**|`el-GR`|\n|**Gujarati (India)**|`gu-IN`|\n|**Hebrew (Israel)**|`he-IL`|\n|**Hebrew (Israel)**|`iw-IL`|\n|**Hindi (India)**|`hi-IN`|\n|**Hindi (Latin)**|`hi-Latn`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Icelandic (Iceland)**|`is-IS`|\n|**Indonesian (Indonesia)**|`id-ID`|\n|**Irish (Ireland)**|`ga-IE`|\n|**Italian (Italy)**|`it-IT`|\n|**Italian (Switzerland)**|`it-CH`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Javanese (Indonesia)**|`jv-ID`|\n|**Kannada (India)**|`kn-IN`|\n|**Kazakh (Kazakhstan)**|`kk-KZ`|\n|**Khmer (Cambodia)**|`km-KH`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Lao (Laos)**|`lo-LA`|\n|**Latvian (Latvia)**|`lv-LV`|\n|**Lithuanian (Lithuania)**|`lt-LT`|\n|**Macedonian (North Macedonia)**|`mk-MK`|\n|**Malay (Malaysia)**|`ms-MY`|\n|**Malayalam (India)**|`ml-IN`|\n|**Maltese (Malta)**|`mt-MT`|\n|**Marathi (India)**|`mr-IN`|\n|**Mongolian (Mongolia)**|`mn-MN`|\n|**Nepali (Nepal)**|`ne-NP`|\n|**Norwegian (Norway)**|`no-NO`|\n|**Norwegian Bokm\u00e5l (Norway)**|`nb-NO`|\n|**Pashto (Afghanistan)**|`ps-AF`|\n|**Persian (Iran)**|`fa-IR`|\n|**Polish (Poland)**|`pl-PL`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Punjabi (India)**|`pa-Guru-IN`|\n|**Romanian (Romania)**|`ro-RO`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Serbia)**|`sr-RS`|\n|**Sinhala (Sri Lanka)**|`si-LK`|\n|**Slovak (Slovakia)**|`sk-SK`|\n|**Slovenian (Slovenia)**|`sl-SI`|\n|**Somali (Somalia)**|`so-SO`|\n|**Spanish 
(Argentina)**|`es-AR`|\n|**Spanish (Bolivia)**|`es-BO`|\n|**Spanish (Chile)**|`es-CL`|\n|**Spanish (Colombia)**|`es-CO`|\n|**Spanish (Costa Rica)**|`es-CR`|\n|**Spanish (Cuba)**|`es-CU`|\n|**Spanish (Dominican Republic)**|`es-DO`|\n|**Spanish (Ecuador)**|`es-EC`|\n|**Spanish (El Salvador)**|`es-SV`|\n|**Spanish (Equatorial Guinea)**|`es-GQ`|\n|**Spanish (Guatemala)**|`es-GT`|\n|**Spanish (Honduras)**|`es-HN`|\n|**Spanish (Latin America)**|`es-419`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Nicaragua)**|`es-NI`|\n|**Spanish (Panama)**|`es-PA`|\n|**Spanish (Paraguay)**|`es-PY`|\n|**Spanish (Peru)**|`es-PE`|\n|**Spanish (Puerto Rico)**|`es-PR`|\n|**Spanish (Spain)**|`es-ES`|\n|**Spanish (United States)**|`es-US`|\n|**Spanish (Uruguay)**|`es-UY`|\n|**Spanish (Venezuela)**|`es-VE`|\n|**Sundanese (Indonesia)**|`su-ID`|\n|**Swahili (Kenya)**|`sw-KE`|\n|**Swahili (Tanzania)**|`sw-TZ`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Tamil (India)**|`ta-IN`|\n|**Tamil (Malaysia)**|`ta-MY`|\n|**Tamil (Singapore)**|`ta-SG`|\n|**Tamil (Sri Lanka)**|`ta-LK`|\n|**Telugu (India)**|`te-IN`|\n|**Thai (Thailand)**|`th-TH`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Ukrainian (Ukraine)**|`uk-UA`|\n|**Urdu (India)**|`ur-IN`|\n|**Urdu (Pakistan)**|`ur-PK`|\n|**Uzbek (Uzbekistan)**|`uz-UZ`|\n|**Vietnamese (Vietnam)**|`vi-VN`|\n|**Welsh (United Kingdom)**|`cy-GB`|\n|**Wu Chinese (China)**|`wuu-CN`|\n|**Zulu (South Africa)**|`zu-ZA`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**deepgram**|`enhanced`|\n\n
", "summary": "Speech to Text Launch Job", "tags": ["Speech To Text Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/SpeechToTextAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/SpeechToTextAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "audio_audio_speech_to_text_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Speech to text delete Jobs", "tags": ["Speech To Text Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/audio/speech_to_text_async/{public_id}/": {"get": {"operationId": "audio_audio_speech_to_text_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Speech to Text Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Speech To Text Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncaudiospeech_to_text_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": 
{"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/audio/text_to_speech/": {"post": {"operationId": "audio_audio_text_to_speech_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|4.0 (per 1000000 char)|1 char\n|**amazon**|**Neural**|`boto3 (v1.15.18)`|16.0 (per 1000000 char)|1 char\n|**google**|-|`v1`|4.0 (per 1000000 char)|1 char\n|**google**|**Standard**|`v1`|4.0 (per 1000000 char)|1 char\n|**google**|**Neural**|`v1`|16.0 (per 1000000 char)|1 char\n|**google**|**Wavenet**|`v1`|16.0 (per 1000000 char)|1 char\n|**google**|**Studio**|`v1`|0.16 (per 1000 char)|1 char\n|**google**|**gemini-2.5-flash-tts**|`v1`|10.0 (per 1000000 char)|1 char\n|**google**|**gemini-2.5-pro-tts**|`v1`|20.0 (per 1000000 char)|None char\n|**microsoft**|-|`v1.0`|16.0 (per 1000000 char)|1 char\n|**lovoai**|-|`v1`|160.0 (per 1000000 char)|1000 char\n|**elevenlabs**|-|`v1`|0.3 (per 1000 char)|1 char\n|**openai**|-|`v1.0`|0.015 (per 1000 char)|1 char\n|**deepgram**|-|`v1`|0.015 (per 1000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Macedonian**|`mk`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Mandarin Chinese**|`cmn`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Serbian**|`sr`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Standard Arabic**|`arb`|\n|**Sundanese**|`su`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wu Chinese**|`wuu`|\n|**Xhosa**|`xh`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans (South Africa)**|`af-ZA`|\n|**Albanian (Albania)**|`sq-AL`|\n|**Amharic (Ethiopia)**|`am-ET`|\n|**Arabic (Algeria)**|`ar-DZ`|\n|**Arabic (Bahrain)**|`ar-BH`|\n|**Arabic (Egypt)**|`ar-EG`|\n|**Arabic (Iraq)**|`ar-IQ`|\n|**Arabic (Jordan)**|`ar-JO`|\n|**Arabic (Kuwait)**|`ar-KW`|\n|**Arabic (Lebanon)**|`ar-LB`|\n|**Arabic (Libya)**|`ar-LY`|\n|**Arabic (Morocco)**|`ar-MA`|\n|**Arabic (Oman)**|`ar-OM`|\n|**Arabic (Pseudo-Accents)**|`ar-XA`|\n|**Arabic (Qatar)**|`ar-QA`|\n|**Arabic (Saudi Arabia)**|`ar-SA`|\n|**Arabic (Syria)**|`ar-SY`|\n|**Arabic (Tunisia)**|`ar-TN`|\n|**Arabic (United Arab Emirates)**|`ar-AE`|\n|**Arabic (Yemen)**|`ar-YE`|\n|**Armenian (Armenia)**|`hy-AM`|\n|**Azerbaijani (Azerbaijan)**|`az-AZ`|\n|**Bangla (Bangladesh)**|`bn-BD`|\n|**Bangla (India)**|`bn-IN`|\n|**Basque (Spain)**|`eu-ES`|\n|**Bosnian (Bosnia & Herzegovina)**|`bs-BA`|\n|**Bulgarian (Bulgaria)**|`bg-BG`|\n|**Burmese (Myanmar (Burma))**|`my-MM`|\n|**Cantonese (China)**|`yue-CN`|\n|**Cantonese (Hong Kong SAR China)**|`yue-HK`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-CN-henan`|\n|**Chinese (China)**|`zh-CN-shandong`|\n|**Chinese (China)**|`zh-CN-sichuan`|\n|**Chinese (Hong Kong SAR China)**|`zh-HK`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Croatian (Croatia)**|`hr-HR`|\n|**Czech (Czechia)**|`cs-CZ`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Belgium)**|`nl-BE`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (Australia)**|`en-AU`|\n|**English (Canada)**|`en-CA`|\n|**English (Cura\u00e7ao)**|`en-AN`|\n|**English (Hong Kong SAR China)**|`en-HK`|\n|**English (India)**|`en-IN`|\n|**English (Ireland)**|`en-IE`|\n|**English (Kenya)**|`en-KE`|\n|**English (New Zealand)**|`en-NZ`|\n|**English (Nigeria)**|`en-NG`|\n|**English (Philippines)**|`en-PH`|\n|**English (Singapore)**|`en-SG`|\n|**English (South Africa)**|`en-ZA`|\n|**English (Tanzania)**|`en-TZ`|\n|**English (United 
Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**Estonian (Estonia)**|`et-EE`|\n|**Filipino (Philippines)**|`fil-PH`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (Belgium)**|`fr-BE`|\n|**French (Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**Galician (Spain)**|`gl-ES`|\n|**Georgian (Georgia)**|`ka-GE`|\n|**German (Austria)**|`de-AT`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Greek (Greece)**|`el-GR`|\n|**Gujarati (India)**|`gu-IN`|\n|**Hebrew (Israel)**|`he-IL`|\n|**Hindi (India)**|`hi-IN`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Icelandic (Iceland)**|`is-IS`|\n|**Indonesian (Indonesia)**|`id-ID`|\n|**Irish (Ireland)**|`ga-IE`|\n|**Italian (Italy)**|`it-IT`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Javanese (Indonesia)**|`jv-ID`|\n|**Kannada (India)**|`kn-IN`|\n|**Kazakh (Kazakhstan)**|`kk-KZ`|\n|**Khmer (Cambodia)**|`km-KH`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Lao (Laos)**|`lo-LA`|\n|**Latvian (Latvia)**|`lv-LV`|\n|**Lithuanian (Lithuania)**|`lt-LT`|\n|**Macedonian (North Macedonia)**|`mk-MK`|\n|**Malay (Malaysia)**|`ms-MY`|\n|**Malayalam (India)**|`ml-IN`|\n|**Maltese (Malta)**|`mt-MT`|\n|**Mandarin Chinese (China)**|`cmn-CN`|\n|**Mandarin Chinese (Taiwan)**|`cmn-TW`|\n|**Marathi (India)**|`mr-IN`|\n|**Mongolian (Mongolia)**|`mn-MN`|\n|**Nepali (Nepal)**|`ne-NP`|\n|**Norwegian Bokm\u00e5l (Norway)**|`nb-NO`|\n|**Pashto (Afghanistan)**|`ps-AF`|\n|**Persian (Iran)**|`fa-IR`|\n|**Polish (Poland)**|`pl-PL`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Punjabi (India)**|`pa-IN`|\n|**Romanian (Romania)**|`ro-RO`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Serbia)**|`sr-RS`|\n|**Sinhala (Sri Lanka)**|`si-LK`|\n|**Slovak (Slovakia)**|`sk-SK`|\n|**Slovenian (Slovenia)**|`sl-SI`|\n|**Somali (Somalia)**|`so-SO`|\n|**Spanish (Argentina)**|`es-AR`|\n|**Spanish (Bolivia)**|`es-BO`|\n|**Spanish (Chile)**|`es-CL`|\n|**Spanish (Colombia)**|`es-CO`|\n|**Spanish 
(Costa Rica)**|`es-CR`|\n|**Spanish (Cuba)**|`es-CU`|\n|**Spanish (Dominican Republic)**|`es-DO`|\n|**Spanish (Ecuador)**|`es-EC`|\n|**Spanish (El Salvador)**|`es-SV`|\n|**Spanish (Equatorial Guinea)**|`es-GQ`|\n|**Spanish (Guatemala)**|`es-GT`|\n|**Spanish (Honduras)**|`es-HN`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Nicaragua)**|`es-NI`|\n|**Spanish (Panama)**|`es-PA`|\n|**Spanish (Paraguay)**|`es-PY`|\n|**Spanish (Peru)**|`es-PE`|\n|**Spanish (Puerto Rico)**|`es-PR`|\n|**Spanish (Spain)**|`es-ES`|\n|**Spanish (United States)**|`es-US`|\n|**Spanish (Uruguay)**|`es-UY`|\n|**Spanish (Venezuela)**|`es-VE`|\n|**Sundanese (Indonesia)**|`su-ID`|\n|**Swahili (Kenya)**|`sw-KE`|\n|**Swahili (Tanzania)**|`sw-TZ`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Tamil (India)**|`ta-IN`|\n|**Tamil (Malaysia)**|`ta-MY`|\n|**Tamil (Singapore)**|`ta-SG`|\n|**Tamil (Sri Lanka)**|`ta-LK`|\n|**Telugu (India)**|`te-IN`|\n|**Thai (Thailand)**|`th-TH`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Ukrainian (Ukraine)**|`uk-UA`|\n|**Urdu (India)**|`ur-IN`|\n|**Urdu (Pakistan)**|`ur-PK`|\n|**Uzbek (United Kingdom)**|`uz-UK`|\n|**Uzbek (Uzbekistan)**|`uz-UZ`|\n|**Vietnamese (Vietnam)**|`vi-VN`|\n|**Welsh (United Kingdom)**|`cy-GB`|\n|**Wu Chinese (China)**|`wuu-CN`|\n|**Xhosa (South Africa)**|`xh-ZA`|\n|**Zulu (South Africa)**|`zu-ZA`|\n\n

Supported Models

\n\n", "summary": "Text to Speech", "tags": ["Text To Speech"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/audiotext_to_speechResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/audio/text_to_speech_async/": {"get": {"operationId": "audio_audio_text_to_speech_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Text To Speech list jobs", "tags": ["Text To Speech Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "audio_audio_text_to_speech_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**lovoai**|-|`v1`|0.16 (per 1000 char)|1 char\n|**amazon**|-|`boto3 (v1.15.18)`|4.0 (per 1000000 char)|1 char\n|**amazon**|**Neural**|`boto3 (v1.15.18)`|16.0 (per 1000000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Macedonian**|`mk`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Mandarin Chinese**|`cmn`|\n|**Marathi**|`mr`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Serbian**|`sr`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Standard Arabic**|`arb`|\n|**Sundanese**|`su`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wu Chinese**|`wuu`|\n|**Xhosa**|`xh`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans (South Africa)**|`af-ZA`|\n|**Albanian (Albania)**|`sq-AL`|\n|**Amharic (Ethiopia)**|`am-ET`|\n|**Arabic (Algeria)**|`ar-DZ`|\n|**Arabic (Bahrain)**|`ar-BH`|\n|**Arabic (Egypt)**|`ar-EG`|\n|**Arabic (Iraq)**|`ar-IQ`|\n|**Arabic (Jordan)**|`ar-JO`|\n|**Arabic (Kuwait)**|`ar-KW`|\n|**Arabic (Lebanon)**|`ar-LB`|\n|**Arabic (Libya)**|`ar-LY`|\n|**Arabic (Morocco)**|`ar-MA`|\n|**Arabic (Oman)**|`ar-OM`|\n|**Arabic (Qatar)**|`ar-QA`|\n|**Arabic (Saudi Arabia)**|`ar-SA`|\n|**Arabic (Tunisia)**|`ar-TN`|\n|**Arabic (United Arab Emirates)**|`ar-AE`|\n|**Armenian (Armenia)**|`hy-AM`|\n|**Azerbaijani (Azerbaijan)**|`az-AZ`|\n|**Bangla (India)**|`bn-IN`|\n|**Basque (Spain)**|`eu-ES`|\n|**Bosnian (Bosnia & Herzegovina)**|`bs-BA`|\n|**Burmese (Myanmar (Burma))**|`my-MM`|\n|**Cantonese (China)**|`yue-CN`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-CN-henan`|\n|**Chinese (China)**|`zh-CN-shandong`|\n|**Chinese (China)**|`zh-CN-sichuan`|\n|**Chinese (Hong Kong SAR China)**|`zh-HK`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Croatian (Croatia)**|`hr-HR`|\n|**Czech (Czechia)**|`cs-CZ`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Belgium)**|`nl-BE`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (Australia)**|`en-AU`|\n|**English (Canada)**|`en-CA`|\n|**English (Cura\u00e7ao)**|`en-AN`|\n|**English (Hong Kong SAR China)**|`en-HK`|\n|**English (India)**|`en-IN`|\n|**English (Ireland)**|`en-IE`|\n|**English (Kenya)**|`en-KE`|\n|**English (New Zealand)**|`en-NZ`|\n|**English (Nigeria)**|`en-NG`|\n|**English (Philippines)**|`en-PH`|\n|**English (Singapore)**|`en-SG`|\n|**English (South Africa)**|`en-ZA`|\n|**English (Tanzania)**|`en-TZ`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**Estonian (Estonia)**|`et-EE`|\n|**Filipino (Philippines)**|`fil-PH`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (Belgium)**|`fr-BE`|\n|**French 
(Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**Galician (Spain)**|`gl-ES`|\n|**Georgian (Georgia)**|`ka-GE`|\n|**German (Austria)**|`de-AT`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Hebrew (Israel)**|`he-IL`|\n|**Hindi (India)**|`hi-IN`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Icelandic (Iceland)**|`is-IS`|\n|**Indonesian (Indonesia)**|`id-ID`|\n|**Irish (Ireland)**|`ga-IE`|\n|**Italian (Italy)**|`it-IT`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Javanese (Indonesia)**|`jv-ID`|\n|**Kazakh (Kazakhstan)**|`kk-KZ`|\n|**Khmer (Cambodia)**|`km-KH`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Lao (Laos)**|`lo-LA`|\n|**Latvian (Latvia)**|`lv-LV`|\n|**Lithuanian (Lithuania)**|`lt-LT`|\n|**Macedonian (North Macedonia)**|`mk-MK`|\n|**Malay (Malaysia)**|`ms-MY`|\n|**Malayalam (India)**|`ml-IN`|\n|**Maltese (Malta)**|`mt-MT`|\n|**Mandarin Chinese (China)**|`cmn-CN`|\n|**Marathi (India)**|`mr-IN`|\n|**Mongolian (Mongolia)**|`mn-MN`|\n|**Nepali (Nepal)**|`ne-NP`|\n|**Norwegian Bokm\u00e5l (Norway)**|`nb-NO`|\n|**Pashto (Afghanistan)**|`ps-AF`|\n|**Persian (Iran)**|`fa-IR`|\n|**Polish (Poland)**|`pl-PL`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Romanian (Romania)**|`ro-RO`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Serbia)**|`sr-RS`|\n|**Sinhala (Sri Lanka)**|`si-LK`|\n|**Slovak (Slovakia)**|`sk-SK`|\n|**Slovenian (Slovenia)**|`sl-SI`|\n|**Somali (Somalia)**|`so-SO`|\n|**Spanish (Argentina)**|`es-AR`|\n|**Spanish (Bolivia)**|`es-BO`|\n|**Spanish (Chile)**|`es-CL`|\n|**Spanish (Colombia)**|`es-CO`|\n|**Spanish (Costa Rica)**|`es-CR`|\n|**Spanish (Cuba)**|`es-CU`|\n|**Spanish (Ecuador)**|`es-EC`|\n|**Spanish (El Salvador)**|`es-SV`|\n|**Spanish (Equatorial Guinea)**|`es-GQ`|\n|**Spanish (Guatemala)**|`es-GT`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Nicaragua)**|`es-NI`|\n|**Spanish (Panama)**|`es-PA`|\n|**Spanish (Paraguay)**|`es-PY`|\n|**Spanish (Puerto 
Rico)**|`es-PR`|\n|**Spanish (Spain)**|`es-ES`|\n|**Spanish (United States)**|`es-US`|\n|**Spanish (Uruguay)**|`es-UY`|\n|**Spanish (Venezuela)**|`es-VE`|\n|**Sundanese (Indonesia)**|`su-ID`|\n|**Swahili (Kenya)**|`sw-KE`|\n|**Swahili (Tanzania)**|`sw-TZ`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Tamil (India)**|`ta-IN`|\n|**Tamil (Malaysia)**|`ta-MY`|\n|**Tamil (Singapore)**|`ta-SG`|\n|**Telugu (India)**|`te-IN`|\n|**Thai (Thailand)**|`th-TH`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Ukrainian (Ukraine)**|`uk-UA`|\n|**Urdu (India)**|`ur-IN`|\n|**Urdu (Pakistan)**|`ur-PK`|\n|**Uzbek (United Kingdom)**|`uz-UK`|\n|**Vietnamese (Vietnam)**|`vi-VN`|\n|**Welsh (United Kingdom)**|`cy-GB`|\n|**Wu Chinese (China)**|`wuu-CN`|\n|**Xhosa (South Africa)**|`xh-ZA`|\n|**Zulu (South Africa)**|`zu-ZA`|\n\n

Supported Models

\n\n", "summary": "Text to Speech launch job", "tags": ["Text To Speech Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/TextToSpeechAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/TextToSpeechAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "audio_audio_text_to_speech_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Text To Speech delete Jobs", "tags": ["Text To Speech Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/audio/text_to_speech_async/{public_id}/": {"get": {"operationId": "audio_audio_text_to_speech_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Text To Speech Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Text To Speech Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncaudiotext_to_speech_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, 
"403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/batch/": {"get": {"operationId": "batch_batch_list", "summary": "List Batch Jobs", "tags": ["Batch"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/BatchList"}}}}, "description": ""}}}}, "/cost_management/": {"get": {"operationId": "cost_management_cost_management_retrieve", "summary": "Monitor Consumptions", "parameters": [{"in": "query", "name": "begin", "schema": {"type": "string", "format": "date"}, "required": true}, {"in": "query", "name": "end", "schema": {"type": "string", "format": "date"}, "required": true}, {"in": "query", "name": "provider", "schema": {"type": "string", "minLength": 1, "maxLength": 200}}, {"in": "query", "name": "rag_project_id", "schema": {"type": "string", "format": "uuid"}}, {"in": "query", "name": "step", "schema": {"type": "integer", "maximum": 4, "minimum": 1}, "required": true}, {"in": "query", "name": "subfeature", "schema": {"type": "string", "minLength": 1, "maxLength": 200}}, {"in": "query", "name": "token", "schema": {"type": "string", "minLength": 1}}, {"in": "query", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}}], "tags": ["Cost Monitoring"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CostMonitoringResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/cost_management/credits/": {"get": {"operationId": "cost_management_", "description": "Get you current credits", "summary": "My Credits", "tags": ["Cost Monitoring"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/credits_serializer"}}}, "description": ""}}}}, "/enabled-features/": {"get": {"operationId": "enabled-features_enabled_features_retrieve", "description": "List all enabled features.", "tags": ["enabled-features"], "security": [{"jwtAuth": []}, {}], "responses": {"200": {"description": "No response body"}}}}, "/image/ai_detection/": {"post": {"operationId": "image_image_ai_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**winstonai**|`v1`|0.021 (per 1 request)|1 request\n\n\n
\n\n", "summary": "AI Detection", "tags": ["Ai Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageai_detectionAiDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageai_detectionAiDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageai_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/anonymization/": {"post": {"operationId": "image_image_anonymization_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**api4ai**|`v1.0.0`|25.0 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Anonymization", "tags": ["Anonymization"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/create_project/": {"post": {"operationId": "image_image_automl_classification_create_project_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Automl Classification - Create Project", "tags": ["Automl Classification"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationcreate_projectAutomlClassificationCreateProjectRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationcreate_projectAutomlClassificationCreateProjectRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/delete_project/": {"post": {"operationId": "image_image_automl_classification_delete_project_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Automl Classification - Delete Project", "tags": ["Automl Classification"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationdelete_projectAutomlClassificationDeleteRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationdelete_projectAutomlClassificationDeleteRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/list_projects/": {"get": {"operationId": "image_image_automl_classification_list_projects_retrieve", "description": "List Automl Classification Projects", "summary": "Automl Classification - List Projects", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationListProjectsResponse"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/predict_async/": {"get": {"operationId": "image_image_automl_classification_predict_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Automl Classification Predict List Job", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "image_image_automl_classification_predict_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|0.005 (per 1 request)|1 request\n\n\n
\n\n", "summary": "Automl Classification Predict Launch Job", "tags": ["Automl Classification"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AutomlClassificationPredictRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationPredictRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}}, "/image/automl_classification/predict_async/{public_id}/": {"get": {"operationId": "image_image_automl_classification_predict_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Automl Classification - Predict Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/train_async/": {"get": {"operationId": 
"image_image_automl_classification_train_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Automl Classification Train List Job", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "image_image_automl_classification_train_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Automl Classification Train Launch Job", "tags": ["Automl Classification"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AutomlClassificationTrainRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationTrainRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}}, "/image/automl_classification/train_async/{public_id}/": {"get": {"operationId": "image_image_automl_classification_train_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Automl Classification - Train Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/automl_classification/upload_data_async/": {"get": {"operationId": 
"image_image_automl_classification_upload_data_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Automl Classification Upload Data List Job", "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "image_image_automl_classification_upload_data_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**nyckel**|`v1.0.0`|0.0005 (per 1 file)|1 file\n\n\n
\n\n", "summary": "Automl Classification Upload Data Launch Job", "tags": ["Automl Classification"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AutomlClassificationUploadDataRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AutomlClassificationUploadDataRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}}, "/image/automl_classification/upload_data_async/{public_id}/": {"get": {"operationId": "image_image_automl_classification_upload_data_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Automl Classification - Upload Data Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Automl Classification"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageautoml_classificationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/background_removal/": {"post": {"operationId": 
"image_image_background_removal_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**api4ai**|`v1.0.0`|50.0 (per 1000 file)|1 file\n|**photoroom**|`v1`|20.0 (per 1000 file)|1 file\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**stabilityai**|`v2Beta`|0.02 (per 1 request)|1 request\n|**clipdrop**|`v1Beta`|0.5 (per 1 request)|1 request\n|**picsart**|`1.0`|0.04 (per 1 image)|1 image\n\n\n
\n\n", "summary": "Background Removal", "tags": ["Background Removal"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagebackground_removalResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/deepfake_detection/": {"post": {"operationId": "image_image_deepfake_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sightengine**|`v1beta`|0.002 (per 1 image)|1 image\n\n\n
\n\n", "summary": "Deepfake Detection", "tags": ["Deepfake Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagedeepfake_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/embeddings/": {"post": {"operationId": "image_image_embeddings_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`v1`|0.06 (per 1000 file)|1 file\n|**google**|**gemini-embedding-001**|`v1`|0.0 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Italian**|`it`|\n|**Spanish**|`es`|\n\n

Supported Models

\n\n", "summary": "Embeddings", "tags": ["Embeddings"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageembeddingsEmbeddingsRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageembeddingsEmbeddingsRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageembeddingsResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/explicit_content/": {"post": {"operationId": "image_image_explicit_content_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000 file)|1 file\n|**clarifai**|-|`8.0.0`|2.0 (per 1000 file)|1 file\n|**google**|-|`v1`|1.5 (per 1000 file)|1 file\n|**microsoft**|-|`v3.2`|1.0 (per 1000 file)|1 file\n|**sentisight**|-|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**openai**|**gpt-4o**|`v1`|24.0 (per 1000 file)|1 file\n|**openai**|-|`v1`|24.0 (per 1000 file)|1 file\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Explicit Content Detection", "tags": ["Explicit Content"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageexplicit_contentResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_compare/": {"post": {"operationId": "image_image_face_compare_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**base64**|`latest`|0.25 (per 1 request)|1 request\n|**facepp**|`v3`|2.0 (per 1000 request)|1 request\n|**amazon**|`boto3 (v1.15.18)`|1.0 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Comparison", "tags": ["Face Compare"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_compareFaceCompareRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_compareFaceCompareRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_compareResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_detection/": {"post": {"operationId": "image_image_face_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|1.0 (per 1000 file)|1 file\n|**clarifai**|`8.0.0`|2.0 (per 1000 file)|1 file\n|**google**|`v1`|1.5 (per 1000 file)|1 file\n|**api4ai**|`v1.0.0`|0.75 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Face Detection", "tags": ["Face Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_detectionFaceDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_detectionFaceDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/add_face/": {"post": {"operationId": "image_image_face_recognition_add_face_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|1.0 (per 1000 image)|1 image\n|**facepp**|`v3`|0.6 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - Add Face", "tags": ["Face Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionadd_faceFaceRecognitionAddFaceRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_recognitionadd_faceFaceRecognitionAddFaceRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/delete_face/": {"post": {"operationId": "image_image_face_recognition_delete_face_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|free|-\n|**facepp**|`v3`|0.1 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - Delete Face", "tags": ["Face Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitiondelete_faceFaceRecognitionDeleteFaceRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/list_faces/": {"get": {"operationId": "image_image_face_recognition_list_faces_retrieve", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|free|-\n|**facepp**|`v3`|0.1 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - List Faces", "parameters": [{"in": "query", "name": "attributes_as_list", "schema": {"type": "boolean", "default": false}, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, {"in": "query", "name": "fallback_providers", "schema": {"type": "array", "items": {"type": "string"}, "default": [], "maxItems": 5}, "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n "}, {"in": "query", "name": "providers", "schema": {"type": "array", "items": {"type": "string", "minLength": 1}}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)", "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, {"in": "query", "name": "settings", "schema": {"type": "string", "default": {}}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}], "tags": ["Face Recognition"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/face_recognition/recognize/": {"post": {"operationId": "image_image_face_recognition_recognize_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|1.0 (per 1000 file)|1 file\n|**facepp**|`v3`|2.0 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Face Recognition - Recognize Face", "tags": ["Face Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionrecognizeFaceRecognitionDetectFaceRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageface_recognitionrecognizeFaceRecognitionDetectFaceRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageface_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/generation/": {"post": {"operationId": "image_image_generation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Resolution|Price|Billing unit|\n|----|----|-------|------|-----|------------|\n|**openai**|**dall-e-3**|`v1Beta`|`1024x1024`|0.04 (per 1 image)|1 image\n|**openai**|**dall-e-3**|`v1Beta`|`512x512`|0.04 (per 1 image)|1 image\n|**openai**|**dall-e-2**|`v1Beta`|`256x256`|0.016 (per 1 image)|1 image\n|**openai**|**dall-e-2**|`v1Beta`|`512x512`|0.018 (per 1 image)|1 image\n|**openai**|**dall-e-2**|`v1Beta`|`1024x1024`|0.02 (per 1 image)|1 image\n|**openai**|**dall-e-3**|`v1Beta`|`1024x1792`|0.08 (per 1 image)|1 image\n|**openai**|**dall-e-3**|`v1Beta`|`1792x1024`|0.08 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`1024x1024`|0.04 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`512x512`|0.018 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`1024x1792`|0.08 (per 1 image)|1 image\n|**openai**|-|`v1Beta`|`1792x1024`|0.08 (per 1 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1024x1024`|15.0 (per 1000 image)|1 image\n|**stabilityai**|-|`v1Beta`|`1024x1024`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1152x896`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`896x1152`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1216x832`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1344x768`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`768x1344`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`1536x640`|15.0 (per 1000 image)|1 image\n|**stabilityai**|**stable-diffusion-xl-1024-v1-0**|`v1Beta`|`640x1536`|15.0 (per 1000 image)|1 image\n|**replicate**|**anime-style**|`v1`|`1024x1024`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**anime-style**|`v1`|`256x256`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|-|`v1`|-|0.000225 (per 1 
exec_time)|1 exec_time\n|**replicate**|**classic**|`v1`|`512x512`|0.00115 (per 1 exec_time)|1 exec_time\n|**replicate**|**anime-style**|`v1`|`512x512`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**vintedois-diffusion**|`v1`|`512x512`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**vintedois-diffusion**|`v1`|`1024x1024`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**vintedois-diffusion**|`v1`|`256x256`|0.000225 (per 1 exec_time)|1 exec_time\n|**replicate**|**classic**|`v1`|`1024x1024`|0.00115 (per 1 exec_time)|1 exec_time\n|**replicate**|**classic**|`v1`|`256x256`|0.00115 (per 1 exec_time)|1 exec_time\n|**leonardo**|**Leonardo Phoenix**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**Leonardo Phoenix**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Lightning XL**|`v1`|`512x512`|0.011 (per 1 image)|1 image\n|**leonardo**|**Leonardo Lightning XL**|`v1`|`1024x1024`|0.012 (per 1 image)|1 image\n|**leonardo**|**Leonardo Anime XL**|`v1`|`512x512`|0.011 (per 1 image)|1 image\n|**leonardo**|**Leonardo Anime XL**|`v1`|`1024x1024`|0.012 (per 1 image)|1 image\n|**leonardo**|**Leonardo Kino XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Kino XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**Leonardo Vision XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Vision XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**Leonardo Diffusion XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**Leonardo Diffusion XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**AlbedoBase XL**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**AlbedoBase XL**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|**SDXL 0.9**|`v1`|`512x512`|0.014 (per 1 image)|1 image\n|**leonardo**|**SDXL 0.9**|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**leonardo**|-|`v1`|`512x512`|0.014 (per 1 image)|1 
image\n|**leonardo**|-|`v1`|`1024x1024`|0.017 (per 1 image)|1 image\n|**minimax**|**image-01**|`v1`|-|0.0035 (per 1 image)|1 image\n|**minimax**|-|`v1`|-|0.0035 (per 1 image)|1 image\n|**bytedance**|**seedream-5-0-260128**|`v3`|-|0.035 (per 1 token)|1 token\n|**bytedance**|**seedream-4-0-250828**|`v3`|-|0.03 (per 1 request)|1 request\n|**bytedance**|**seedream-4-5-251128**|`v3`|-|0.03 (per 1 request)|1 request\n|**bytedance**|-|`v3`|-|0.03 (per 1 request)|1 request\n|**bytedance**|**seedream-3-0-t2i-250415**|`v3`|-|0.03 (per 1 request)|1 request\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`dall-e-3`|\n|**stabilityai**|`stable-diffusion-xl-1024-v1-0`|\n|**replicate**|`classic`|\n|**leonardo**|`Leonardo Phoenix`|\n|**minimax**|`image-01`|\n|**bytedance**|`seedream-3-0-t2i-250415`|\n\n
", "summary": "Image generation", "tags": ["Generation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagegenerationGenerationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagegenerationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/landmark_detection/": {"post": {"operationId": "image_image_landmark_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`v1`|1.5 (per 1000 file)|1 file\n|**microsoft**|`v3.2`|1.0 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Landmark Detection", "tags": ["Landmark Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagelandmark_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/logo_detection/": {"post": {"operationId": "image_image_logo_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|-|`v1`|1.5 (per 1000 file)|1 file\n|**microsoft**|-|`v3.2`|1.0 (per 1000 file)|1 file\n|**api4ai**|-|`v1.0.0`|0.25 (per 1000 file)|1 file\n|**api4ai**|**v1**|`v1.0.0`|0.25 (per 1000 file)|1 file\n|**api4ai**|**v2**|`v1.0.0`|2.5 (per 1000 file)|1 file\n|**clarifai**|-|`8.0.0`|2.0 (per 1000 file)|1 file\n|**openai**|-|`v1`|24.0 (per 1000 file)|1 file\n|**openai**|**gpt-4o**|`v1`|24.0 (per 1000 file)|1 file\n|**openai**|**gpt-4-turbo**|`v1`|48.0 (per 1000 file)|1 file\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**api4ai**|`v1`|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Logo Detection", "tags": ["Logo Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagelogo_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/object_detection/": {"post": {"operationId": "image_image_object_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000 file)|1 file\n|**api4ai**|-|`1.9.2`|0.5 (per 1000 file)|1 file\n|**clarifai**|-|`8.0.0`|2.0 (per 1000 file)|1 file\n|**clarifai**|**general-image-detection**|`8.0.0`|2.0 (per 1000 file)|1 file\n|**google**|-|`v1`|2.25 (per 1000 file)|1 file\n|**microsoft**|-|`v3.2`|1.0 (per 1000 file)|1 file\n|**sentisight**|-|`v3.3.1`|0.75 (per 1000 file)|1 file\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**clarifai**|`general-image-detection`|\n\n
", "summary": "Object Detection", "tags": ["Object Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageobject_detectionObjectDetectionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imageobject_detectionObjectDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imageobject_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/question_answer/": {"post": {"operationId": "image_image_question_answer_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|**gpt-4-turbo**|`v1`|30.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1`|8.0 (per 1000000 token)|1 token\n|**openai**|**gpt-5**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**o1**|`v1`|60.0 (per 1000000 token)|1 token\n|**openai**|**o1-mini**|`v1`|12.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|0.6 (per 1000000 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Italian**|`it`|\n|**Spanish**|`es`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**google**|`gemini-1.5-pro`|\n\n
", "summary": "Question Answer", "tags": ["Question Answer"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagequestion_answerResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/delete_image/": {"post": {"operationId": "image_image_search_delete_image_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|free|-\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Search - Delete phase", "tags": ["Search"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchdelete_imageDeleteImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagesearchdelete_imageDeleteImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/get_image/": {"get": {"operationId": "image_image_search_get_image_retrieve", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|free|-\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Search - get image", "parameters": [{"in": "query", "name": "attributes_as_list", "schema": {"type": "boolean", "default": false}, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, {"in": "query", "name": "fallback_providers", "schema": {"type": "array", "items": {"type": "string"}, "default": [], "maxItems": 5}, "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n "}, {"in": "query", "name": "image_name", "schema": {"type": "string", "minLength": 1}, "required": true}, {"in": "query", "name": "providers", "schema": {"type": "array", "items": {"type": "string", "minLength": 1}}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)", "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, {"in": "query", "name": "settings", "schema": {"type": "string", "default": {}}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}], "tags": ["Search"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/get_images/": {"get": {"operationId": "image_image_search_get_images_retrieve", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|free|-\n|**nyckel**|`v1.0.0`|free|-\n\n\n
\n\n", "summary": "Search - list all images", "parameters": [{"in": "query", "name": "attributes_as_list", "schema": {"type": "boolean", "default": false}, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, {"in": "query", "name": "fallback_providers", "schema": {"type": "array", "items": {"type": "string"}, "default": [], "maxItems": 5}, "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n "}, {"in": "query", "name": "providers", "schema": {"type": "array", "items": {"type": "string", "minLength": 1}}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)", "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, {"in": "query", "name": "settings", "schema": {"type": "string", "default": {}}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}], "tags": ["Search"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/launch_similarity/": {"post": {"operationId": "image_image_search_launch_similarity_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**nyckel**|`v1.0.0`|1.0 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Search - launch similarity", "tags": ["Search"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchlaunch_similaritySearchImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagesearchlaunch_similaritySearchImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/image/search/upload_image/": {"post": {"operationId": "image_image_search_upload_image_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**nyckel**|`v1.0.0`|0.5 (per 1000 file)|1 file\n\n\n
\n\n", "summary": "Search - Upload Phase", "tags": ["Search"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchupload_imageUploadImageRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/imagesearchupload_imageUploadImageRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/imagesearchResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/info/provider_subfeatures/": {"get": {"operationId": "info_info_provider_subfeatures_list", "description": "List Provider and features relations : You can get a list of *all providers* for a *feature* or *all features* for a *given provider*.\n\nYou can have the detailed information on the **pricing**, **supported languages** as well as the **models** for providers who propose different models (eg: voice models available for a text to speech provider).\n\nExample : If you want the detailed list of all providers that can analyse the sentiment of a text written in french, you'd need to set `feature__name=text`, `subfeature__name=sentiment_analysis` and `languages=fr`.\n\nWhich will look like the following :\n\n\n```bash\ncurl --request GET https://api.edenai.run/v2/info/provider_subfeatures?subfeature__name=sentiment_analysis&feature__name=text&languages=fr\n```", "summary": "List Providers Subfeatures", "parameters": [{"in": "query", "name": "feature__name", "schema": {"type": "string"}}, {"in": 
"query", "name": "gender", "schema": {"type": "string"}, "description": "Accepts two values: either 'male' or 'female'. Used to filter models voices for the text_to_speech subfeature"}, {"in": "query", "name": "is_working", "schema": {"type": "boolean"}}, {"in": "query", "name": "language", "schema": {"type": "string"}, "description": "languages [icontains]"}, {"in": "query", "name": "phase__name", "schema": {"type": "string"}}, {"in": "query", "name": "provider__name", "schema": {"type": "string"}}, {"in": "query", "name": "subfeature__name", "schema": {"type": "string"}}], "tags": ["Infos"], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/ProviderSubfeature"}}}}, "description": ""}}}}, "/llm/chat/": {"post": {"operationId": "llm_llm_chat_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|**amazon.nova-lite-v1:0**|`llmengine (v2)`|0.24 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-micro-v1:0**|`llmengine (v2)`|0.14 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`llmengine (v2)`|3.2 (per 1000000 token)|1 token\n|**anthropic**|**claude-opus-4-6**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-20250514**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1-20250805**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5**|`v1`|2.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-20241022**|`v1`|4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-latest**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5-20251001**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5**|`v1`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20250219**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-haiku-20240307**|`v1`|1.25e-06 (per 1 token)|1 token\n|**anthropic**|**claude-4-opus-20250514**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-4-sonnet-20250514**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5-20250929**|`v1`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-20250514**|`v1`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5-20251101**|`v1`|2.5e-05 (per 1 token)|1 token\n|**cohere**|**command-r7b-12-2024**|`llmengine (v2)`|0.15 (per 1000000 token)|1 token\n|**cohere**|**command-r-08-2024**|`llmengine (v2)`|0.6 (per 1000000 token)|1 token\n|**deepseek**|**deepseek-chat**|`llmengine (v2)`|1.1e-06 (per 
1 token)|1 token\n|**deepseek**|**deepseek-reasoner**|`llmengine (v2)`|2.19e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-coder**|`llmengine (v2)`|2.8e-07 (per 1 token)|1 token\n|**meta**|**meta.llama3-1-405b-instruct-v1:0**|`llmengine (v2)`|2.4 (per 1000000 token)|1 token\n|**meta**|**meta.llama3-1-70b-instruct-v1:0**|`llmengine (v2)`|0.72 (per 1000000 token)|1 token\n|**meta**|**meta.llama3-1-8b-instruct-v1:0**|`llmengine (v2)`|0.22 (per 1000000 token)|1 token\n|**mistral**|**magistral-medium-2506**|`llmengine (v2)`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-2506**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-latest**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**pixtral-large-latest**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-small-latest**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**codestral-latest**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-latest**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2405**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2508**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**mistral**|**devstral-medium-2507**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**devstral-small-2505**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**devstral-small-2507**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**labs-devstral-small-2512**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**devstral-2512**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-2509**|`llmengine (v2)`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-latest**|`llmengine (v2)`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-latest**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2402**|`llmengine (v2)`|1.2e-05 (per 1 token)|1 
token\n|**mistral**|**mistral-large-2407**|`llmengine (v2)`|9e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2411**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium**|`llmengine (v2)`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2312**|`llmengine (v2)`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2505**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**mistral**|**mistral-small**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**mistral-tiny**|`llmengine (v2)`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-7b**|`llmengine (v2)`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo-2407**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x22b**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x7b**|`llmengine (v2)`|7e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-12b-2409**|`llmengine (v2)`|1.5e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-large-2411**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`llmengine (v2)`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-pro**|`llmengine (v2)`|100.0 (per 1000000 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3**|`llmengine (v2)`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**o3-mini**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`llmengine (v2)`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`llmengine (v2)`|1.4e-05 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`llmengine (v2)`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`llmengine (v2)`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`llmengine (v2)`|1.6e-06 (per 1 
token)|1 token\n|**openai**|**gpt-4.1-nano**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`llmengine (v2)`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`llmengine (v2)`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-72B-Instruct-Turbo**|`llmengine (v2)`|1.2e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`llmengine (v2)`|1.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-72B-Instruct-Turbo**|`llmengine (v2)`|1.2 (per 1000000 token)|1 token\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-120b**|`llmengine 
(v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Thinking-2507**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-fp8-tput**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1**|`llmengine (v2)`|7e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1-0528-tput**|`llmengine (v2)`|2.19e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3**|`llmengine (v2)`|1.25e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3.1**|`llmengine (v2)`|1.7e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8**|`llmengine (v2)`|8.5e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-4-Scout-17B-16E-Instruct**|`llmengine (v2)`|5.9e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo**|`llmengine (v2)`|3.5e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`llmengine (v2)`|1.8e-07 (per 1 token)|1 token\n|**together_ai**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-20b**|`llmengine (v2)`|2e-07 (per 1 token)|1 token\n|**together_ai**|**zai-org/GLM-4.5-Air-FP8**|`llmengine (v2)`|1.1e-06 (per 1 token)|1 token\n|**together_ai**|**zai-org/GLM-4.6**|`llmengine (v2)`|2.2e-06 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct-0905**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Instruct**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 
token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Thinking**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Instruct-2507-tput**|`llmengine (v2)`|6e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo**|`llmengine (v2)`|3.5e-06 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`llmengine (v2)`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`llmengine (v2)`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`llmengine (v2)`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`llmengine (v2)`|5e-07 (per 1 
token)|1 token\n|**xai**|**grok-4-latest**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`llmengine (v2)`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-image-preview**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-3.1-pro-preview**|`llmengine (v2)`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-3-pro-image-preview**|`llmengine (v2)`|12.0 (per 1000000 token)|1 token\n|**google**|**gemini-3-flash-preview**|`llmengine (v2)`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`llmengine (v2)`|4e-07 (per 1 token)|1 
token\n|**google**|**gemini-2.5-pro**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**google**|**gemma-3-27b-it**|`llmengine (v2)`|0.0 (per 1 token)|1 token\n|**groq**|**llama-3.1-8b-instant**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-120b**|`v1`|7.5e-07 (per 1 token)|1 token\n|**groq**|**llama-3.3-70b-versatile**|`v1`|7.9e-07 (per 1 token)|1 token\n|**groq**|**llama-3.3-70b-versatile**|`v1`|7.9e-07 (per 1 token)|1 token\n|**groq**|**llama-3.1-8b-instant**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-guard-4-12b**|`v1`|2e-07 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-4-maverick-17b-128e-instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-4-scout-17b-16e-instruct**|`v1`|3.4e-07 (per 1 token)|1 token\n|**groq**|**moonshotai/kimi-k2-instruct-0905**|`v1`|3e-06 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-20b**|`v1`|5e-07 (per 1 token)|1 token\n|**groq**|**qwen/qwen3-32b**|`v1`|5.9e-07 (per 1 token)|1 token\n|**microsoft**|**gpt-4o**|`Azure AI Foundry`|5.0 (per 1000000 token)|1 token\n|**microsoft**|**o3-mini**|`Azure AI Foundry`|4.4 (per 1000000 token)|1 token\n|**microsoft**|**o1-mini**|`Azure AI Foundry`|12.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-4o-mini**|`Azure AI Foundry`|0.66 (per 1000000 token)|1 token\n|**microsoft**|**gpt-4**|`Azure AI Foundry`|60.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-35-turbo-16k**|`Azure AI Foundry`|4.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-35-turbo**|`Azure AI Foundry`|1.5 (per 1000000 token)|1 token\n|**minimax**|**minimax-m1**|`v1`|2.2 (per 1000000 token)|1 token\n|**minimax**|**minimax-text-01**|`v1`|1.1 (per 1000000 token)|1 token\n|**minimax**|**MiniMax-M2.1**|`v1`|1.2e-06 (per 1 token)|1 token\n|**minimax**|**MiniMax-M2.1-lightning**|`v1`|2.4e-06 (per 1 token)|1 token\n|**minimax**|**MiniMax-M2**|`v1`|1.2e-06 (per 1 token)|1 token\n|**bytedance**|**seed-1-6-250915**|`llmengine (v2)`|2.0 (per 1000000 token)|1 
token\n|**perplexityai**|**sonar**|`llmengine (v2)`|1.0 (per 1000000 token)|1 token\n|**perplexityai**|**sonar**|`llmengine (v2)`|1e-06 (per 1 token)|1 token\n|**perplexityai**|**sonar-pro**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**perplexityai**|**sonar-deep-research**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**perplexityai**|**sonar-pro**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**perplexityai**|**sonar-reasoning-pro**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**deepinfra**|**Gryphe/MythoMax-L2-13b**|`v1`|9e-08 (per 1 token)|1 token\n|**deepinfra**|**NousResearch/Hermes-3-Llama-3.1-405B**|`v1`|1e-06 (per 1 token)|1 token\n|**deepinfra**|**NousResearch/Hermes-3-Llama-3.1-70B**|`v1`|3e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/QwQ-32B**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen2.5-72B-Instruct**|`v1`|3.9e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen2.5-7B-Instruct**|`v1`|1e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen2.5-VL-32B-Instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-14B**|`v1`|2.4e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-235B-A22B**|`v1`|5.4e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-235B-A22B-Instruct-2507**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-235B-A22B-Thinking-2507**|`v1`|2.9e-06 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-30B-A3B**|`v1`|2.9e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-32B**|`v1`|2.8e-07 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-Coder-480B-A35B-Instruct**|`v1`|1.6e-06 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-Coder-480B-A35B-Instruct-Turbo**|`v1`|1.2e-06 (per 1 token)|1 token\n|**deepinfra**|**Qwen/Qwen3-Next-80B-A3B-Instruct**|`v1`|1.4e-06 (per 1 token)|1 token\n|**deepinfra**|**Sao10K/L3-8B-Lunaris-v1-Turbo**|`v1`|5e-08 (per 1 token)|1 token\n|**deepinfra**|**Sao10K/L3.1-70B-Euryale-v2.2**|`v1`|7.5e-07 (per 1 token)|1 
token\n|**deepinfra**|**Sao10K/L3.3-70B-Euryale-v2.3**|`v1`|7.5e-07 (per 1 token)|1 token\n|**deepinfra**|**anthropic/claude-3-7-sonnet-latest**|`v1`|1.65e-05 (per 1 token)|1 token\n|**deepinfra**|**anthropic/claude-4-opus**|`v1`|8.25e-05 (per 1 token)|1 token\n|**deepinfra**|**anthropic/claude-4-sonnet**|`v1`|1.65e-05 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1**|`v1`|2.4e-06 (per 1 token)|1 token\n|**deepinfra**|**nvidia/Llama-3.3-Nemotron-Super-49B-v1.5**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-0528**|`v1`|2.15e-06 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-Distill-Llama-70B**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-Distill-Qwen-32B**|`v1`|2.7e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-Turbo**|`v1`|3e-06 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3**|`v1`|8.9e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3-0324**|`v1`|8.8e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3.1**|`v1`|1e-06 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-V3.1-Terminus**|`v1`|1e-06 (per 1 token)|1 token\n|**deepinfra**|**google/gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**google/gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**deepinfra**|**google/gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**deepinfra**|**google/gemma-3-12b-it**|`v1`|1e-07 (per 1 token)|1 token\n|**deepinfra**|**google/gemma-3-27b-it**|`v1`|1.6e-07 (per 1 token)|1 token\n|**deepinfra**|**google/gemma-3-4b-it**|`v1`|8e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-3.2-11B-Vision-Instruct**|`v1`|4.9e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-3.2-3B-Instruct**|`v1`|2e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-3.3-70B-Instruct**|`v1`|4e-07 (per 1 token)|1 
token\n|**deepinfra**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`v1`|3.9e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-4-Scout-17B-16E-Instruct**|`v1`|3e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-Guard-3-8B**|`v1`|5.5e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-Guard-4-12B**|`v1`|1.8e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3-8B-Instruct**|`v1`|6e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-70B-Instruct**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`v1`|2.8e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-8B-Instruct**|`v1`|5e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`v1`|3e-08 (per 1 token)|1 token\n|**deepinfra**|**microsoft/WizardLM-2-8x22B**|`v1`|4.8e-07 (per 1 token)|1 token\n|**deepinfra**|**microsoft/phi-4**|`v1`|1.4e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-Nemo-Instruct-2407**|`v1`|4e-08 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-Small-24B-Instruct-2501**|`v1`|8e-08 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-Small-3.2-24B-Instruct-2506**|`v1`|2e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`v1`|4e-07 (per 1 token)|1 token\n|**deepinfra**|**moonshotai/Kimi-K2-Instruct**|`v1`|2e-06 (per 1 token)|1 token\n|**deepinfra**|**moonshotai/Kimi-K2-Instruct-0905**|`v1`|2e-06 (per 1 token)|1 token\n|**deepinfra**|**nvidia/Llama-3.1-Nemotron-70B-Instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**nvidia/NVIDIA-Nemotron-Nano-9B-v2**|`v1`|1.6e-07 (per 1 token)|1 token\n|**deepinfra**|**openai/gpt-oss-120b**|`v1`|4.5e-07 (per 1 token)|1 token\n|**deepinfra**|**openai/gpt-oss-20b**|`v1`|1.5e-07 (per 1 token)|1 
token\n|**deepinfra**|**zai-org/GLM-4.5**|`v1`|1.6e-06 (per 1 token)|1 token\n|**deepinfra**|**lizpreciatior/lzlv_70b_fp16_hf**|`v1`|9e-07 (per 1 token)|1 token\n|**deepinfra**|**Gryphe/MythoMax-L2-13b**|`v1`|2.2e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mistral-7B-Instruct-v0.1**|`v1`|1.3e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-2-70b-chat-hf**|`v1`|9e-07 (per 1 token)|1 token\n|**deepinfra**|**cognitivecomputations/dolphin-2.6-mixtral-8x7b**|`v1`|2.7e-07 (per 1 token)|1 token\n|**deepinfra**|**Phind/Phind-CodeLlama-34B-v2**|`v1`|6e-07 (per 1 token)|1 token\n|**deepinfra**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`v1`|2.7e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Llama-2-13b-chat-hf**|`v1`|2.2e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3-8B-Instruct**|`v1`|8e-08 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3-70B-Instruct**|`v1`|7.9e-07 (per 1 token)|1 token\n|**deepinfra**|**meta-llama/Meta-Llama-3.1-405B-Instruct**|`v1`|9e-07 (per 1 token)|1 token\n|**deepinfra**|**openchat/openchat_3.5**|`v1`|1.3e-07 (per 1 token)|1 token\n|**deepinfra**|**deepseek-ai/DeepSeek-R1-0528-Turbo**|`v1`|3e-06 (per 1 token)|1 token\n|**cerebras**|**llama3.1-8b**|`llmengine (v2)`|1e-07 (per 1 token)|1 token\n|**cerebras**|**gpt-oss-120b**|`llmengine (v2)`|6.9e-07 (per 1 token)|1 token\n|**cloudflare**|**@cf/meta/llama-2-7b-chat-fp16**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**cloudflare**|**@cf/meta/llama-2-7b-chat-int8**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**cloudflare**|**@cf/mistral/mistral-7b-instruct-v0.1**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**cloudflare**|**@hf/thebloke/codellama-7b-instruct-awq**|`llmengine (v2)`|1.923e-06 (per 1 token)|1 token\n|**databricks**|**databricks-claude-3-7-sonnet**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-haiku-4-5**|`llmengine (v2)`|5e-06 (per 1 token)|1 
token\n|**databricks**|**databricks-claude-opus-4-1**|`llmengine (v2)`|7.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-opus-4-5**|`llmengine (v2)`|2.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-sonnet-4**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-claude-sonnet-4-5**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gemini-2-5-flash**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**databricks**|**databricks-gemini-2-5-pro**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gemma-3-12b**|`llmengine (v2)`|5e-07 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5-1**|`llmengine (v2)`|1e-05 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5-mini**|`llmengine (v2)`|2e-06 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-5-nano**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-oss-120b**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**databricks**|**databricks-gpt-oss-20b**|`llmengine (v2)`|3e-07 (per 1 token)|1 token\n|**databricks**|**databricks-llama-4-maverick**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**databricks**|**databricks-meta-llama-3-1-405b-instruct**|`llmengine (v2)`|1.5e-05 (per 1 token)|1 token\n|**databricks**|**databricks-meta-llama-3-1-8b-instruct**|`llmengine (v2)`|4.5e-07 (per 1 token)|1 token\n|**databricks**|**databricks-meta-llama-3-3-70b-instruct**|`llmengine (v2)`|1.5e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen2p5-vl-32b-instruct**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-235b-a22b**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-235b-a22b-instruct-2507**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 
token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-235b-a22b-thinking-2507**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-8b**|`llmengine (v2)`|2e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-235b-a22b-instruct**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-235b-a22b-thinking**|`llmengine (v2)`|8.8e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-30b-a3b-instruct**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-vl-30b-a3b-thinking**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-r1-0528**|`llmengine (v2)`|8e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3-0324**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3p1**|`llmengine (v2)`|1.68e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3p1-terminus**|`llmengine (v2)`|1.68e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/deepseek-v3p2**|`llmengine (v2)`|1.68e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/glm-4p6**|`llmengine (v2)`|2.19e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/gpt-oss-120b**|`llmengine (v2)`|6e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/gpt-oss-20b**|`llmengine (v2)`|2e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/kimi-k2-instruct-0905**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/kimi-k2-thinking**|`llmengine (v2)`|2.5e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/qwen3-coder-480b-a35b-instruct**|`llmengine (v2)`|1.8e-06 (per 1 token)|1 
token\n|**fireworks_ai**|**accounts/fireworks/models/llama-v3p3-70b-instruct**|`llmengine (v2)`|9e-07 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/minimax-m2**|`llmengine (v2)`|1.2e-06 (per 1 token)|1 token\n|**fireworks_ai**|**accounts/fireworks/models/mixtral-8x22b-instruct**|`llmengine (v2)`|1.2e-06 (per 1 token)|1 token\n|**ovhcloud**|**DeepSeek-R1-Distill-Llama-70B**|`llmengine (v2)`|6.7e-07 (per 1 token)|1 token\n|**ovhcloud**|**Llama-3.1-8B-Instruct**|`llmengine (v2)`|1e-07 (per 1 token)|1 token\n|**ovhcloud**|**Meta-Llama-3_3-70B-Instruct**|`llmengine (v2)`|6.7e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mistral-7B-Instruct-v0.3**|`llmengine (v2)`|1e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mistral-Nemo-Instruct-2407**|`llmengine (v2)`|1.3e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mistral-Small-3.2-24B-Instruct-2506**|`llmengine (v2)`|2.8e-07 (per 1 token)|1 token\n|**ovhcloud**|**Mixtral-8x7B-Instruct-v0.1**|`llmengine (v2)`|6.3e-07 (per 1 token)|1 token\n|**ovhcloud**|**Qwen2.5-Coder-32B-Instruct**|`llmengine (v2)`|8.7e-07 (per 1 token)|1 token\n|**ovhcloud**|**Qwen2.5-VL-72B-Instruct**|`llmengine (v2)`|9.1e-07 (per 1 token)|1 token\n|**ovhcloud**|**Qwen3-32B**|`llmengine (v2)`|2.3e-07 (per 1 token)|1 token\n|**ovhcloud**|**gpt-oss-120b**|`llmengine (v2)`|4e-07 (per 1 token)|1 token\n|**ovhcloud**|**gpt-oss-20b**|`llmengine (v2)`|1.5e-07 (per 1 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**amazon**|`amazon.nova-pro-v1:0`|\n|**anthropic**|`claude-3-7-sonnet-latest`|\n|**cohere**|`command-r`|\n|**deepseek**|`deepseek-chat`|\n|**meta**|`meta.llama3-1-70b-instruct-v1:0`|\n|**mistral**|`mistral-large-latest`|\n|**openai**|`gpt-4o`|\n|**together_ai**|`Qwen/Qwen2.5-72B-Instruct-Turbo`|\n|**xai**|`grok-2-latest`|\n|**google**|`gemini-2.0-flash`|\n|**groq**|`llama-3.3-70b-versatile`|\n|**microsoft**|`gpt-4o`|\n|**minimax**|`MiniMax-M1`|\n|**bytedance**|`seed-1-6-250915`|\n|**perplexityai**|`sonar`|\n|**deepinfra**|`nvidia/Llama-3.3-Nemotron-Super-49B-v1.5`|\n|**cerebras**|`gpt-oss-120b`|\n\n
", "summary": "Chat", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/llmchatllmchatChatRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/llmchatResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/multimodal/chat/": {"post": {"operationId": "multimodal_multimodal_chat_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**anthropic**|**claude-3-5-haiku-latest**|`bedrock-2023-05-31`|4.0 (per 1000000 token)|1 token\n|**anthropic**|-|`bedrock-2023-05-31`|15.0 (per 1000000 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20250219**|`bedrock-2023-05-31`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|-|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|0.3 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|18.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|10.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1000000 token)|1 token\n|**openai**|**o1**|`v1Beta`|60.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4-turbo**|`v1Beta`|30.0 (per 1000000 token)|1 token\n|**openai**|-|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1Beta`|8.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v1Beta`|1.6 (per 1000000 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1Beta`|0.6 (per 1000000 token)|1 token\n|**openai**|**gpt-5.2**|`v1Beta`|14.0 (per 1000000 token)|1 
token\n|**openai**|**gpt-5**|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-5-mini**|`v1Beta`|2.0 (per 1000000 token)|1 token\n|**openai**|**gpt-5-nano**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**o1-mini**|`v1Beta`|12.0 (per 1000000 token)|1 token\n|**openai**|**o1-2024-12-17**|`v1Beta`|60.0 (per 1000000 token)|1 token\n|**xai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|10.0 (per 1000000 token)|1 token\n|**amazon**|-|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**mistral**|-|`v0.0.1`|6.0 (per 1000000 token)|1 token\n|**mistral**|**pixtral-large-latest**|`v0.0.1`|6.0 (per 1000000 token)|1 token\n|**microsoft**|**gpt-4o**|`Azure AI Foundry`|15.0 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**anthropic**|`claude-3-5-sonnet-latest`|\n|**google**|`gemini-2.0-flash`|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-vision-latest`|\n|**amazon**|`amazon.nova-pro-v1:0`|\n|**mistral**|`pixtral-large-latest`|\n|**microsoft**|`gpt-4o`|\n\n
", "summary": "Chat", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/multimodalchatChatRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/multimodalchatResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/anonymization_async/": {"get": {"operationId": "ocr_ocr_anonymization_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Anonymization List Job", "tags": ["Anonymization Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_anonymization_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**readyredact**|`v1`|0.05 (per 1 file)|1 file\n|**base64**|`v1`|0.25 (per 1 page)|1 page\n|**privateai**|`v3`|0.01 (per 1 page)|1 page\n\n\n
\n\n", "summary": "Anonymization Launch Job", "tags": ["Anonymization Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AnonymizationAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AnonymizationAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_anonymization_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Anonymization delete Jobs", "tags": ["Anonymization Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/anonymization_async/{public_id}/": {"get": {"operationId": "ocr_ocr_anonymization_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Anonymization Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Anonymization Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocranonymization_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/bank_check_parsing/": {"post": {"operationId": "ocr_ocr_bank_check_parsing_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**base64**|`latest`|0.25 (per 1 page)|1 page\n|**veryfi**|`v8`|0.16 (per 1 request)|1 request\n|**mindee**|`v1`|0.1 (per 1 page)|1 page\n|**extracta**|`v1`|0.1 (per 1 page)|1 page\n\n\n
\n\n", "summary": "Bank Check Parsing", "tags": ["Bank Check Parsing"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrbank_check_parsingResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/custom_document_parsing_async/": {"get": {"operationId": "ocr_ocr_custom_document_parsing_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Custom Document Parsing List Job", "tags": ["Custom Document Parsing Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_custom_document_parsing_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 1.26.8`|15.0 (per 1000 page)|1 page\n|**extracta**|`v1`|0.1 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Italian**|`it`|\n|**Portuguese**|`pt`|\n|**Spanish**|`es`|\n\n
", "summary": "Custom Document Parsing Launch Job", "tags": ["Custom Document Parsing Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_custom_document_parsing_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Custom Document Parsing delete Jobs", "tags": ["Custom Document Parsing Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/custom_document_parsing_async/{public_id}/": {"get": {"operationId": "ocr_ocr_custom_document_parsing_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Custom Document Parsing Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Custom Document Parsing Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocrcustom_document_parsing_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/data_extraction/": {"post": {"operationId": "ocr_ocr_data_extraction_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|0.05 (per 1 page)|1 page\n|**base64**|`latest`|0.25 (per 1 page)|1 page\n\n\n
\n\n", "summary": "Data Extraction", "tags": ["Data Extraction"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrdata_extractionDataExtractionRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrdata_extractionDataExtractionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrdata_extractionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/financial_parser/": {"post": {"operationId": "ocr_ocr_financial_parser_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Document Type|Price|Billing unit|\n|----|----|-------|------|-----|------------|\n|**affinda**|-|`v3`|`invoice`|0.08 (per 1 page)|1 page\n|**affinda**|-|`v3`|`receipt`|0.07 (per 1 page)|1 page\n|**amazon**|-|`boto3 1.26.8`|-|0.01 (per 1 page)|1 page\n|**base64**|-|`latest`|-|0.25 (per 1 page)|1 page\n|**google**|-|`DocumentAI v1 beta3`|`invoice`|0.01 (per 1 page)|10 page\n|**google**|-|`DocumentAI v1 beta3`|`receipt`|0.01 (per 1 page)|10 page\n|**klippa**|-|`v1`|-|0.1 (per 1 file)|1 file\n|**microsoft**|-|`rest API 4.0 (2024-02-29-preview)`|-|0.01 (per 1 page)|1 page\n|**mindee**|-|`v1.2`|-|0.1 (per 1 page)|1 page\n|**tabscanner**|-|`latest`|-|0.08 (per 1 page)|1 page\n|**veryfi**|-|`v8`|`receipt`|0.08 (per 1 file)|1 file\n|**veryfi**|-|`v8`|`invoice`|0.16 (per 1 file)|1 file\n|**eagledoc**|-|`v1`|-|0.03 (per 1 page)|1 page\n|**extracta**|-|`v1`|-|0.1 (per 1 page)|1 page\n|**openai**|**gpt-4o**|`v1.0`|-|0.04 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Catalan (Spain)**|`ca-ES`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-cn`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Taiwan)**|`zh-tw`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**French (Canada)**|`fr-CA`|\n|**French (France)**|`fr-FR`|\n|**French (Switzerland)**|`fr-CH`|\n|**German (Germany)**|`de-DE`|\n|**German (Switzerland)**|`de-CH`|\n|**Italian (Italy)**|`it-IT`|\n|**Italian (Switzerland)**|`it-CH`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Spanish (Spain)**|`es-ES`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Financial Parser", "tags": ["Financial Parser"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrfinancial_parserResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/identity_parser/": {"post": {"operationId": "ocr_ocr_identity_parser_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|0.025 (per 1 page)|1 page\n|**base64**|-|`latest`|0.2 (per 1 page)|1 page\n|**microsoft**|-|`rest API 4.0 (2024-02-29-preview)`|0.01 (per 1 page)|1 page\n|**mindee**|-|`v2`|0.1 (per 1 page)|1 page\n|**klippa**|-|`v1`|0.1 (per 1 file)|1 file\n|**affinda**|-|`v3`|0.07 (per 1 file)|1 file\n|**openai**|**gpt-4o**|`v1`|0.02 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Arabic**|`ar`|\n|**Bengali**|`bn`|\n|**Bulgarian**|`bg`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Kannada**|`kn`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Macedonian**|`mk`|\n|**Malayalam**|`ml`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Vietnamese**|`vi`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-cn`|\n|**Chinese (Taiwan)**|`zh-tw`|\n|**English (United States)**|`en-US`|\n|**French (France)**|`fr-FR`|\n|**German (Germany)**|`de-DE`|\n|**Italian (Italy)**|`it-IT`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Spanish (Spain)**|`es-ES`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Identity Parser", "tags": ["Identity Parser"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocridentity_parserIdentityParserRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocridentity_parserIdentityParserRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocridentity_parserResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/ocr/": {"post": {"operationId": "ocr_ocr_ocr_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|1.5 (per 1000 page)|1 page\n|**google**|`v1`|1.5 (per 1000 page)|1 page\n|**microsoft**|`v3.2`|1.0 (per 1000 page)|1 page\n|**sentisight**|`v3.3.1`|0.75 (per 1000 file)|1 file\n|**api4ai**|`v1.0.0`|3.0 (per 1000 request)|1 request\n|**mistral**|`v1`|1.0 (per 1000 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Abaza**|`abq`|\n|**Adyghe**|`ady`|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Angika**|`anp`|\n|**Arabic**|`ar`|\n|**Assamese**|`as`|\n|**Asturian**|`ast`|\n|**Avaric**|`av`|\n|**Awadhi**|`awa`|\n|**Azerbaijani**|`az`|\n|**Bagheli**|`bfy`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bhojpuri**|`bho`|\n|**Bihari languages**|`bh`|\n|**Bislama**|`bi`|\n|**Bodo (India)**|`brx`|\n|**Bosnian**|`bs`|\n|**Braj**|`bra`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Bundeli**|`bns`|\n|**Buriat**|`bua`|\n|**Camling**|`rab`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chamorro**|`ch`|\n|**Chechen**|`ce`|\n|**Chhattisgarhi**|`hne`|\n|**Chinese**|`zh`|\n|**Cornish**|`kw`|\n|**Corsican**|`co`|\n|**Crimean Tatar**|`crh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dargwa**|`dar`|\n|**Dari**|`prs`|\n|**Dhimal**|`dhi`|\n|**Dogri (macrolanguage)**|`doi`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Erzya**|`myv`|\n|**Estonian**|`et`|\n|**Faroese**|`fo`|\n|**Fijian**|`fj`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Friulian**|`fur`|\n|**Gagauz**|`gag`|\n|**Galician**|`gl`|\n|**German**|`de`|\n|**Gilbertese**|`gil`|\n|**Goan Konkani**|`gom`|\n|**Gondi**|`gon`|\n|**Gurung**|`gvr`|\n|**Haitian**|`ht`|\n|**Halbi**|`hlb`|\n|**Hani**|`hni`|\n|**Haryanvi**|`bgc`|\n|**Hawaiian**|`haw`|\n|**Hindi**|`hi`|\n|**Hmong Daw**|`mww`|\n|**Ho**|`hoc`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Inari Sami**|`smn`|\n|**Indonesian**|`id`|\n|**Ingush**|`inh`|\n|**Interlingua (International Auxiliary Language 
Association)**|`ia`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Jaunsari**|`jns`|\n|**Javanese**|`jv`|\n|**K'iche'**|`quc`|\n|**Kabardian**|`kbd`|\n|**Kabuverdianu**|`kea`|\n|**Kachin**|`kac`|\n|**Kalaallisut**|`kl`|\n|**Kangri**|`xnr`|\n|**Kara-Kalpak**|`kaa`|\n|**Karachay-Balkar**|`krc`|\n|**Kashubian**|`csb`|\n|**Kazakh**|`kk`|\n|**Khaling**|`klr`|\n|**Khasi**|`kha`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Korku**|`kfq`|\n|**Koryak**|`kpy`|\n|**Kosraean**|`kos`|\n|**Kumarbhag Paharia**|`kmj`|\n|**Kumyk**|`kum`|\n|**Kurdish**|`ku`|\n|**Kurukh**|`kru`|\n|**K\u00f6lsch**|`ksh`|\n|**Lak**|`lbe`|\n|**Lakota**|`lkt`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lezghian**|`lez`|\n|**Lithuanian**|`lt`|\n|**Lower Sorbian**|`dsb`|\n|**Lule Sami**|`smj`|\n|**Luxembourgish**|`lb`|\n|**Mahasu Pahari**|`bfz`|\n|**Maithili**|`mai`|\n|**Malay (macrolanguage)**|`ms`|\n|**Maltese**|`mt`|\n|**Manx**|`gv`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Marshallese**|`mh`|\n|**Mongolian**|`mn`|\n|**Montenegrin**|`cnr`|\n|**Neapolitan**|`nap`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Newari**|`new`|\n|**Niuean**|`niu`|\n|**Nogai**|`nog`|\n|**Northern Sami**|`se`|\n|**Norwegian**|`no`|\n|**Occitan (post 1500)**|`oc`|\n|**Old English (ca. 
450-1100)**|`ang`|\n|**Ossetian**|`os`|\n|**Pali**|`pi`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Romansh**|`rm`|\n|**Russian**|`ru`|\n|**Sadri**|`sck`|\n|**Samoan**|`sm`|\n|**Sanskrit**|`sa`|\n|**Santali**|`sat`|\n|**Scots**|`sco`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Sherpa**|`xsr`|\n|**Sirmauri**|`srx`|\n|**Skolt Sami**|`sms`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sami**|`sma`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tabassaran**|`tab`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tatar**|`tt`|\n|**Tetum**|`tet`|\n|**Thangmi**|`thf`|\n|**Tonga (Tonga Islands)**|`to`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Tuvinian**|`tyv`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Upper Sorbian**|`hsb`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Volap\u00fck**|`vo`|\n|**Walser**|`wae`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Yucateco**|`yua`|\n|**Zhuang**|`za`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Arabic (Pseudo-Accents)**|`ar-XA`|\n|**Belarusian**|`be-cyrl`|\n|**Belarusian (Latin)**|`be-latn`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Danish (Denmark)**|`da-DK`|\n|**Dutch (Netherlands)**|`nl-NL`|\n|**English (United States)**|`en-US`|\n|**Finnish (Finland)**|`fi-FI`|\n|**French (France)**|`fr-FR`|\n|**German (Germany)**|`de-DE`|\n|**Hungarian (Hungary)**|`hu-HU`|\n|**Italian (Italy)**|`it-IT`|\n|**Japanese (Japan)**|`ja-JP`|\n|**Kara-Kalpak (Cyrillic)**|`kaa-Cyrl`|\n|**Kazakh**|`kk-cyrl`|\n|**Kazakh (Latin)**|`kk-latn`|\n|**Korean (South Korea)**|`ko-KR`|\n|**Kurdish (Arabic)**|`ku-arab`|\n|**Kurdish (Latin)**|`ku-latn`|\n|**Polish**|`pl-PO`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Region: Czechia**|`cz-CZ`|\n|**Region: Greece**|`gr-GR`|\n|**Russian (Russia)**|`ru-RU`|\n|**Serbian (Cyrillic, Montenegro)**|`sr-Cyrl-ME`|\n|**Serbian (Latin)**|`sr-latn`|\n|**Serbian (Latin, Montenegro)**|`sr-Latn-ME`|\n|**Spanish (Spain)**|`es-ES`|\n|**Swedish (Sweden)**|`sv-SE`|\n|**Turkish (T\u00fcrkiye)**|`tr-TR`|\n|**Uzbek (Arabic)**|`uz-arab`|\n|**Uzbek (Cyrillic)**|`uz-cyrl`|\n\n
", "summary": "OCR", "tags": ["Ocr"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrocrOcrRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrocrOcrRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrocrResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/ocr_async/": {"get": {"operationId": "ocr_ocr_ocr_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Ocr Async List Job", "tags": ["Ocr Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_ocr_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|1.5 (per 1000 page)|1 page\n|**microsoft**|`rest API 4.0 (2024-02-29-preview)`|10.0 (per 1000 page)|1 page\n|**mistral**|`v1`|1.0 (per 1000 page)|1 page\n\n\n
\n\n", "summary": "Ocr Async Launch Job", "tags": ["Ocr Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncOcrRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncOcrRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_ocr_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Ocr Async delete Jobs", "tags": ["Ocr Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/ocr_async/{public_id}/": {"get": {"operationId": "ocr_ocr_ocr_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Ocr Async Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Ocr Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocrocr_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": 
{"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/ocr_tables_async/": {"get": {"operationId": "ocr_ocr_ocr_tables_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "OCR Tables List Job", "tags": ["Ocr Tables Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "ocr_ocr_ocr_tables_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3 (v1.15.18)`|15.0 (per 1000 page)|1 page\n|**google**|`DocumentAI v1 beta3`|65.0 (per 1000 page)|1 page\n|**microsoft**|`rest API 4.0 (2024-02-29-preview)`|10.0 (per 1000 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Angika**|`anp`|\n|**Arabic**|`ar`|\n|**Asturian**|`ast`|\n|**Awadhi**|`awa`|\n|**Azerbaijani**|`az`|\n|**Bagheli**|`bfy`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bhojpuri**|`bho`|\n|**Bislama**|`bi`|\n|**Bodo (India)**|`brx`|\n|**Bosnian**|`bs`|\n|**Braj**|`bra`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Bundeli**|`bns`|\n|**Buriat**|`bua`|\n|**Camling**|`rab`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chamorro**|`ch`|\n|**Chhattisgarhi**|`hne`|\n|**Chinese**|`zh`|\n|**Cornish**|`kw`|\n|**Corsican**|`co`|\n|**Crimean Tatar**|`crh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dari**|`prs`|\n|**Dhimal**|`dhi`|\n|**Dogri (macrolanguage)**|`doi`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Erzya**|`myv`|\n|**Estonian**|`et`|\n|**Faroese**|`fo`|\n|**Fijian**|`fj`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Friulian**|`fur`|\n|**Gagauz**|`gag`|\n|**Galician**|`gl`|\n|**German**|`de`|\n|**Gilbertese**|`gil`|\n|**Gondi**|`gon`|\n|**Gurung**|`gvr`|\n|**Haitian**|`ht`|\n|**Halbi**|`hlb`|\n|**Hani**|`hni`|\n|**Haryanvi**|`bgc`|\n|**Hawaiian**|`haw`|\n|**Hindi**|`hi`|\n|**Hmong Daw**|`mww`|\n|**Ho**|`hoc`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Inari Sami**|`smn`|\n|**Indonesian**|`id`|\n|**Interlingua (International Auxiliary Language Association)**|`ia`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Jaunsari**|`jns`|\n|**Javanese**|`jv`|\n|**K'iche'**|`quc`|\n|**Kabuverdianu**|`kea`|\n|**Kachin**|`kac`|\n|**Kalaallisut**|`kl`|\n|**Kangri**|`xnr`|\n|**Kara-Kalpak**|`kaa`|\n|**Karachay-Balkar**|`krc`|\n|**Kashubian**|`csb`|\n|**Kazakh**|`kk`|\n|**Khaling**|`klr`|\n|**Khasi**|`kha`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Korku**|`kfq`|\n|**Koryak**|`kpy`|\n|**Kosraean**|`kos`|\n|**Kumarbhag 
Paharia**|`kmj`|\n|**Kumyk**|`kum`|\n|**Kurdish**|`ku`|\n|**Kurukh**|`kru`|\n|**K\u00f6lsch**|`ksh`|\n|**Lakota**|`lkt`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Lower Sorbian**|`dsb`|\n|**Lule Sami**|`smj`|\n|**Luxembourgish**|`lb`|\n|**Mahasu Pahari**|`bfz`|\n|**Malay (macrolanguage)**|`ms`|\n|**Maltese**|`mt`|\n|**Manx**|`gv`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Mongolian**|`mn`|\n|**Montenegrin**|`cnr`|\n|**Neapolitan**|`nap`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Niuean**|`niu`|\n|**Nogai**|`nog`|\n|**Northern Sami**|`se`|\n|**Norwegian**|`no`|\n|**Occitan (post 1500)**|`oc`|\n|**Ossetian**|`os`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Romansh**|`rm`|\n|**Russian**|`ru`|\n|**Sadri**|`sck`|\n|**Samoan**|`sm`|\n|**Sanskrit**|`sa`|\n|**Santali**|`sat`|\n|**Scots**|`sco`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Sherpa**|`xsr`|\n|**Sirmauri**|`srx`|\n|**Skolt Sami**|`sms`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sami**|`sma`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tatar**|`tt`|\n|**Tetum**|`tet`|\n|**Thangmi**|`thf`|\n|**Tonga (Tonga Islands)**|`to`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Tuvinian**|`tyv`|\n|**Uighur**|`ug`|\n|**Upper Sorbian**|`hsb`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Volap\u00fck**|`vo`|\n|**Walser**|`wae`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Yucateco**|`yua`|\n|**Zhuang**|`za`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Belarusian**|`be-Cyrl`|\n|**Belarusian (Latin)**|`be-Latn`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Kara-Kalpak (Cyrillic)**|`kaa-Cyrl`|\n|**Kazakh**|`kk-Cyrl`|\n|**Kazakh (Latin)**|`kk-Latn`|\n|**Kurdish (Arabic)**|`ku-Arab`|\n|**Kurdish (Latin)**|`ku-Latn`|\n|**Serbian (Cyrillic)**|`sr-Cyrl`|\n|**Serbian (Cyrillic, Montenegro)**|`sr-Cyrl-ME`|\n|**Serbian (Latin)**|`sr-Latn`|\n|**Serbian (Latin, Montenegro)**|`sr-Latn-ME`|\n|**Uzbek (Arabic)**|`uz-Arab`|\n|**Uzbek (Cyrillic)**|`uz-cyrl`|\n\n
", "summary": "OCR Tables Launch Job", "tags": ["Ocr Tables Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/OcrTablesAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/OcrTablesAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "ocr_ocr_ocr_tables_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "OCR Tables delete Jobs", "tags": ["Ocr Tables Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/ocr/ocr_tables_async/{public_id}/": {"get": {"operationId": "ocr_ocr_ocr_tables_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "OCR Tables Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Ocr Tables Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncocrocr_tables_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/ocr/resume_parser/": {"post": {"operationId": "ocr_ocr_resume_parser_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**affinda**|-|`v3`|0.07 (per 1 file)|1 file\n|**klippa**|-|`v1`|0.1 (per 1 file)|1 file\n|**senseloaf**|-|`v3`|0.045 (per 1 file)|1 file\n|**extracta**|-|`v1`|0.1 (per 1 page)|1 page\n|**openai**|-|`v1.0`|0.04 (per 1 page)|1 page\n|**openai**|**gpt-4o**|`v1.0`|0.04 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (China)**|`zh-cn`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Taiwan)**|`zh-tw`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Resume Parser", "tags": ["Resume Parser"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrresume_parserResumeParserRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ocrresume_parserResumeParserRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ocrresume_parserResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/prompts/": {"get": {"operationId": "prompts_prompts_list", "summary": "List Prompts", "parameters": [{"name": "page", "required": false, "in": "query", "description": "A page number within the paginated result set.", "schema": {"type": "integer"}}, {"name": "page_size", "required": false, "in": "query", "description": "Number of results to return per page.", "schema": {"type": "integer"}}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedPromptCreateList"}}}, "description": ""}}}, "post": {"operationId": "prompts_prompts_create", "summary": "Create Prompts", "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptCreateRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptCreateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptCreateRequest"}}}, "required": 
true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptCreate"}}}, "description": ""}}}}, "/prompts/{name}/": {"get": {"operationId": "prompts_prompts_retrieve", "summary": "Get Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "post": {"operationId": "prompts_prompts_create_2", "description": "Call prompt with your variables in prompt_context. You can override params saved in the prompt and specify file urls", "summary": "Call prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptCallRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptCallRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptCallRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "put": {"operationId": "prompts_prompts_update", "summary": "Update Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdateRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptUpdateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": 
{"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "patch": {"operationId": "prompts_prompts_partial_update", "summary": "Update Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedPromptUpdateRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedPromptUpdateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedPromptUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptUpdate"}}}, "description": ""}}}, "delete": {"operationId": "prompts_prompts_destroy", "summary": "Delete Prompt", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/prompts/{name}/history/": {"get": {"operationId": "prompts_prompts_history_list", "summary": "List Prompt History", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}, {"name": "page", "required": false, "in": "query", "description": "A page number within the paginated result set.", "schema": {"type": "integer"}}, {"name": "page_size", "required": false, "in": "query", "description": "Number of results to return per page.", "schema": {"type": "integer"}}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedPromptHistoryList"}}}, "description": ""}}}, "post": {"operationId": "prompts_prompts_history_create", "summary": "Create Prompt History", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": 
["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}}, "/prompts/{name}/history/{id}/": {"get": {"operationId": "prompts_prompts_history_retrieve", "summary": "Get Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}, "put": {"operationId": "prompts_prompts_history_update", "summary": "Update Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PromptHistoryRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}, "patch": {"operationId": "prompts_prompts_history_partial_update", "summary": "Update Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", 
"name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedPromptHistoryRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedPromptHistoryRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedPromptHistoryRequest"}}}}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PromptHistory"}}}, "description": ""}}}, "delete": {"operationId": "prompts_prompts_history_destroy", "summary": "Delete Prompt History", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/prompts/{name}/history/{id}/template-variables/": {"get": {"operationId": "prompts_prompts_history_template_variables_retrieve", "summary": "Get Prompt Template Variables", "parameters": [{"in": "path", "name": "id", "schema": {"type": "integer"}, "required": true}, {"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["Prompts"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"description": "No response body"}}}}, "/resources/": {"get": {"operationId": "resources_resources_list", "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/ResourceList"}}}}, "description": ""}}}, "post": {"operationId": "resources_resources_create", "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceCreateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], 
"responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceCreate"}}}, "description": ""}}}}, "/resources/{resource}/": {"get": {"operationId": "resources_resources_retrieve", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdate"}}}, "description": ""}}}, "put": {"operationId": "resources_resources_update", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdate"}}}, "description": ""}}}, "patch": {"operationId": "resources_resources_partial_update", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedResourceUpdateRequest"}}}}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ResourceUpdate"}}}, "description": ""}}}, "delete": {"operationId": "resources_resources_destroy", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/resources/{resource}/asset/": {"post": {"operationId": "resources_resources_asset_create", "parameters": [{"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/AssetCreateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetCreate"}}}, "description": ""}}}}, "/resources/{resource}/asset/{asset}/": {"get": {"operationId": "resources_resources_asset_retrieve", "parameters": [{"in": "path", "name": "asset", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdate"}}}, "description": ""}}}, "put": {"operationId": "resources_resources_asset_update", "parameters": [{"in": "path", "name": "asset", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdateRequest"}}}, "required": true}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdate"}}}, "description": ""}}}, "patch": {"operationId": "resources_resources_asset_partial_update", "parameters": [{"in": "path", "name": "asset", "schema": {"type": "string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedAssetUpdateRequest"}}}}, "security": [{"jwtAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/AssetUpdate"}}}, "description": ""}}}, "delete": {"operationId": "resources_resources_asset_destroy", "parameters": [{"in": "path", "name": "asset", "schema": {"type": 
"string"}, "required": true}, {"in": "path", "name": "resource", "schema": {"type": "string"}, "required": true}], "tags": ["resources"], "security": [{"jwtAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/text/ai_detection/": {"post": {"operationId": "text_text_ai_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**sapling**|`v1`|5.0 (per 1000000 char)|1000 char\n|**winstonai**|`v2`|14.0 (per 1000000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Chinese**|`zh`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Spanish**|`es`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n\n
", "summary": "AI Content Detection", "tags": ["Ai Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textai_detectionAiDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textai_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/anonymization/": {"post": {"operationId": "text_text_anonymization_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|-|`v3.0.0`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 seconde)|1 seconde\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**microsoft**|-|`v3.1`|0.25 (per 1000000 char)|1000 char\n|**privateai**|-|`v3`|5.0 (per 1000000 char)|100 char\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 
token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Arabic**|`ar`|\n|**Bambara**|`bm`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Khmer**|`km`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Malay (macrolanguage)**|`ms`|\n|**Modern Greek (1453-)**|`el`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Spanish**|`es`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tamil**|`ta`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Ukrainian**|`uk`|\n|**Vietnamese**|`vi`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Anonymization", "tags": ["Anonymization"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textanonymizationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/chat/": {"post": {"operationId": "text_text_chat_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|**o3-mini**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|-|`v1Beta`|10.0 (per 1000000 token)|1 token\n|**openai**|**o1-preview**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v1Beta`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v1Beta`|1.4e-05 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v1Beta`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v1Beta`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v1Beta`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v1Beta`|6e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4-1106-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v1Beta`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v1Beta`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v1Beta`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v1Beta`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v1Beta`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v1Beta`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v1Beta`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v1Beta`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v1Beta`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v1Beta`|2.4e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4o-mini-search-preview**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v1Beta`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v1Beta`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v1Beta`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v1Beta`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v1Beta`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v1Beta`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v1Beta`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v1Beta`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v1Beta`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v1Beta`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v1Beta`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v1Beta`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v1Beta`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o4-mini**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v1Beta`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v1Beta`|0.0 (per 1 seconde)|1 seconde\n|**google**|**gemini-1.5-flash-8b-latest**|`v1`|0.3 (per 1000000 token)|1 token\n|**google**|-|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 
token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 
1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|-|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-light**|`2022-12-06`|0.6 (per 1000000 token)|1 token\n|**cohere**|**command-nightly**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-light-nightly**|`2022-12-06`|0.6 (per 1000000 token)|1 token\n|**cohere**|**command-r**|`2022-12-06`|1.5 (per 1000000 token)|1 token\n|**cohere**|**command-r7b-12-2024**|`2022-12-06`|0.15 (per 1000000 token)|1 token\n|**meta**|**llama3-1-405b-instruct-v1:0**|`boto3 (v1.35.84)`|2.4 (per 1000000 token)|1 token\n|**meta**|**llama3-1-70b-instruct-v1:0**|`boto3 (v1.35.84)`|0.72 (per 1000000 token)|1 token\n|**meta**|**llama3-1-8b-instruct-v1:0**|`boto3 (v1.35.84)`|0.22 (per 1000000 token)|1 token\n|**meta**|-|`boto3 (v1.35.84)`|0.15 (per 1000000 token)|1 token\n|**mistral**|-|`v0.0.1`|6.0 (per 1000000 token)|1 token\n|**mistral**|**pixtral-large-latest**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-saba-latest**|`v0.0.1`|0.6 (per 1000000 token)|1 token\n|**mistral**|**mistral-small-latest**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**mistral-large-latest**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**codestral-latest**|`v0.0.1`|3e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2405**|`v0.0.1`|3e-06 (per 1 token)|1 token\n|**mistral**|**codestral-2508**|`v0.0.1`|9e-07 (per 1 token)|1 token\n|**mistral**|**codestral-mamba-latest**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**devstral-medium-2507**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**devstral-small-2505**|`v0.0.1`|3e-07 (per 1 token)|1 
token\n|**mistral**|**devstral-small-2507**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**labs-devstral-small-2512**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**devstral-2512**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-2506**|`v0.0.1`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-2509**|`v0.0.1`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-medium-latest**|`v0.0.1`|5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-2506**|`v0.0.1`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**magistral-small-latest**|`v0.0.1`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2402**|`v0.0.1`|1.2e-05 (per 1 token)|1 token\n|**mistral**|**mistral-large-2407**|`v0.0.1`|9e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-2411**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**mistral-large-3**|`v0.0.1`|1.5e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium**|`v0.0.1`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2312**|`v0.0.1`|8.1e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-2505**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**mistral-medium-latest**|`v0.0.1`|2e-06 (per 1 token)|1 token\n|**mistral**|**mistral-small**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**mistral-tiny**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-codestral-mamba**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-7b**|`v0.0.1`|2.5e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mistral-nemo-2407**|`v0.0.1`|3e-07 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x22b**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**mistral**|**open-mixtral-8x7b**|`v0.0.1`|7e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-12b-2409**|`v0.0.1`|1.5e-07 (per 1 token)|1 token\n|**mistral**|**pixtral-large-2411**|`v0.0.1`|6e-06 (per 1 token)|1 token\n|**perplexityai**|-|`v1.0`|2.0 (per 1000000 
token)|1 token\n|**perplexityai**|**sonar-pro**|`v1.0`|30.0 (per 1000000 token)|1 token\n|**perplexityai**|**sonar**|`v1.0`|2.0 (per 1000000 token)|1 token\n|**anthropic**|-|`bedrock-2023-05-31`|15.0 (per 1000000 token)|1 token\n|**anthropic**|**claude-3-5-haiku-20241022-v1:0**|`bedrock-2023-05-31`|4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20240620-v1:0**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20241022-v2:0**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20240620-v1:0**|`bedrock-2023-05-31`|1.8e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-haiku-20240307-v1:0**|`bedrock-2023-05-31`|1.25e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-opus-20240229-v1:0**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-sonnet-20240229-v1:0**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-instant-v1**|`bedrock-2023-05-31`|2.4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-v1**|`bedrock-2023-05-31`|2.4e-05 (per 1 token)|1 token\n|**anthropic**|**claude-v2:1**|`bedrock-2023-05-31`|2.4e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-20241022**|`bedrock-2023-05-31`|4e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-haiku-latest**|`bedrock-2023-05-31`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5-20251001**|`bedrock-2023-05-31`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-haiku-4-5**|`bedrock-2023-05-31`|5e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20240620**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-20241022**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-5-sonnet-latest**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-7-sonnet-20250219**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 
token\n|**anthropic**|**claude-3-7-sonnet-latest**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-haiku-20240307**|`bedrock-2023-05-31`|1.25e-06 (per 1 token)|1 token\n|**anthropic**|**claude-3-opus-20240229**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-3-opus-latest**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-4-opus-20250514**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-4-sonnet-20250514**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-5-20250929**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-1-20250805**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-20250514**|`bedrock-2023-05-31`|7.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5-20251101**|`bedrock-2023-05-31`|2.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-opus-4-5**|`bedrock-2023-05-31`|2.5e-05 (per 1 token)|1 token\n|**anthropic**|**claude-sonnet-4-20250514**|`bedrock-2023-05-31`|1.5e-05 (per 1 token)|1 token\n|**xai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 
token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**deepseek**|**deepseek-v3**|`v1`|1.1e-06 (per 1 token)|1 token\n|**deepseek**|-|`v1`|1.25 (per 1000000 token)|1 token\n|**deepseek**|**deepseek-reasoner**|`v1`|2.19e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-chat**|`v1`|1.1e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-coder**|`v1`|2.8e-07 (per 1 token)|1 token\n|**deepseek**|**deepseek-r1**|`v1`|2.19e-06 (per 1 token)|1 token\n|**deepseek**|**deepseek-v3.2**|`v1`|4e-07 (per 1 token)|1 token\n|**amazon**|-|`boto3 (v1.29.6)`|0.24 (per 1000000 
token)|1 token\n|**amazon**|**amazon.nova-lite-v1:0**|`boto3 (v1.29.6)`|0.24 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-micro-v1:0**|`boto3 (v1.29.6)`|0.14 (per 1000000 token)|1 token\n|**together_ai**|-|`v1`|1.2 (per 1000000 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-72B-Instruct-Turbo**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo**|`v1`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-21.1b-41b**|`v1`|8e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-4.1b-8b**|`v1`|2e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-41.1b-80b**|`v1`|9e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-8.1b-21b**|`v1`|3e-07 (per 1 token)|1 token\n|**together_ai**|**together-ai-81.1b-110b**|`v1`|1.8e-06 (per 1 token)|1 token\n|**together_ai**|**together-ai-up-to-4b**|`v1`|1e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen2.5-7B-Instruct-Turbo**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Instruct-2507-tput**|`v1`|6e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-Thinking-2507**|`v1`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-235B-A22B-fp8-tput**|`v1`|6e-07 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Coder-480B-A35B-Instruct-FP8**|`v1`|2e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1**|`v1`|7e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-R1-0528-tput**|`v1`|2.19e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3**|`v1`|1.25e-06 (per 1 token)|1 token\n|**together_ai**|**deepseek-ai/DeepSeek-V3.1**|`v1`|1.7e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-3.2-3B-Instruct-Turbo**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**meta-llama/Llama-3.3-70B-Instruct-Turbo-Free**|`v1`|0.0 (per 1 token)|1 
token\n|**together_ai**|**meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8**|`v1`|8.5e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Llama-4-Scout-17B-16E-Instruct**|`v1`|5.9e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo**|`v1`|3.5e-06 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo**|`v1`|8.8e-07 (per 1 token)|1 token\n|**together_ai**|**meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo**|`v1`|1.8e-07 (per 1 token)|1 token\n|**together_ai**|**mistralai/Mistral-7B-Instruct-v0.1**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**mistralai/Mistral-Small-24B-Instruct-2501**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**mistralai/Mixtral-8x7B-Instruct-v0.1**|`v1`|6e-07 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct**|`v1`|3e-06 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-120b**|`v1`|6e-07 (per 1 token)|1 token\n|**together_ai**|**openai/gpt-oss-20b**|`v1`|2e-07 (per 1 token)|1 token\n|**together_ai**|**togethercomputer/CodeLlama-34b-Instruct**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**together_ai**|**zai-org/GLM-4.5-Air-FP8**|`v1`|1.1e-06 (per 1 token)|1 token\n|**together_ai**|**zai-org/GLM-4.6**|`v1`|2.2e-06 (per 1 token)|1 token\n|**together_ai**|**moonshotai/Kimi-K2-Instruct-0905**|`v1`|3e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Instruct**|`v1`|1.5e-06 (per 1 token)|1 token\n|**together_ai**|**Qwen/Qwen3-Next-80B-A3B-Thinking**|`v1`|1.5e-06 (per 1 token)|1 token\n|**microsoft**|**gpt-4o**|`Azure AI Foundry`|15.0 (per 1000000 token)|1 token\n|**groq**|**llama3-70b-8192**|`v1`|0.59 (per 1000000 token)|1 token\n|**groq**|**llama-3.1-8b-instant**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**llama-3.3-70b-versatile**|`v1`|7.9e-07 (per 1 token)|1 token\n|**groq**|**gemma-7b-it**|`v1`|8e-08 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-guard-4-12b**|`v1`|2e-07 (per 1 token)|1 
token\n|**groq**|**meta-llama/llama-4-maverick-17b-128e-instruct**|`v1`|6e-07 (per 1 token)|1 token\n|**groq**|**meta-llama/llama-4-scout-17b-16e-instruct**|`v1`|3.4e-07 (per 1 token)|1 token\n|**groq**|**moonshotai/kimi-k2-instruct-0905**|`v1`|3e-06 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-120b**|`v1`|7.5e-07 (per 1 token)|1 token\n|**groq**|**openai/gpt-oss-20b**|`v1`|5e-07 (per 1 token)|1 token\n|**groq**|**qwen/qwen3-32b**|`v1`|5.9e-07 (per 1 token)|1 token\n\n\n
\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**google**|`gemini-2.5-flash`|\n|**cohere**|`command`|\n|**meta**|`llama3-1-8b-instruct-v1:0`|\n|**mistral**|`mistral-large-latest`|\n|**perplexityai**|`sonar`|\n|**anthropic**|`claude-3-5-sonnet-latest`|\n|**xai**|`grok-2-latest`|\n|**deepseek**|`deepseek-chat`|\n|**amazon**|`amazon.nova-lite-v1:0`|\n|**together_ai**|`Qwen/Qwen2.5-72B-Instruct-Turbo`|\n|**microsoft**|`gpt-4o`|\n|**groq**|`llama3-70b-8192`|\n\n
", "summary": "Chat", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textchatChatRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textchatResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/chat/stream/": {"post": {"operationId": "text_text_chat_stream_create", "description": "Streamed version of Chat feature, the raw text will be streamed chunk by chunk.\n\nNOTE: For this feature, you an only request one provider at a time.", "summary": "Chat Stream", "tags": ["Chat"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textchatChatStreamRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"text/plain": {"schema": {"type": "string"}}}, "description": ""}}}}, "/text/code_generation/": {"post": {"operationId": "text_text_code_generation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v1`|1.5e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v1`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v1`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v1`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v1`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v1`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v1`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v1`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v1`|3e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4-vision-preview**|`v1`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v1`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v1`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v1`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v1`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v1`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v1`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v1`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v1`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v1`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v1`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v1`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v1`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v1`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v1`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v1`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v1`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v1`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v1`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v1`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v1`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v1`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v1`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v1`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v1`|0.0 (per 1 seconde)|1 seconde\n|**google**|-|`v1`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-latest**|`v1`|0.3 (per 1000000 
token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 
token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n|**xai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 
token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 
token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**google**|`gemini-1.5-flash`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Code Generation", "tags": ["Code Generation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textcode_generationCodeGenerationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textcode_generationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/embeddings/": {"post": {"operationId": "text_text_embeddings_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|-|`v3.0.0`|0.1 (per 1000000 token)|1 token\n|**openai**|**1536__text-embedding-ada-002**|`v3.0.0`|0.1 (per 1000000 token)|1 token\n|**openai**|**text-embedding-3-large**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-embedding-3-small**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-embedding-ada-002**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-embedding-ada-002-v2**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**google**|**gemini-embedding-001**|`v1`|1.5e-07 (per 1 token)|1 token\n|**cohere**|**embed-english-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-english-light-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-multilingual-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-english-v2.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-english-light-v2.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-multilingual-v2.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-v4.0**|`v1`|0.0 (per 1 token)|1 token\n|**cohere**|**embed-multilingual-light-v3.0**|`v1`|0.0 (per 1 token)|1 token\n|**mistral**|**1024__mistral-embed**|`v0.0.1`|0.1 (per 1000000 token)|1 token\n|**mistral**|-|`v0.0.1`|0.1 (per 1000000 token)|1 token\n|**mistral**|**mistral/mistral-embed**|`v0.0.1`|0.1 (per 1000000 seconde)|1 seconde\n|**mistral**|**mistral-embed**|`v0.0.1`|0.0 (per 1 seconde)|1 seconde\n|**mistral**|**codestral-embed**|`v0.0.1`|0.0 (per 1 seconde)|1 seconde\n|**mistral**|**codestral-embed-2505**|`v0.0.1`|0.0 (per 1 seconde)|1 seconde\n|**jina**|-|`v1`|0.018 (per 1000000 token)|1 token\n|**jina**|**jina-embeddings-v2-base-en**|`v1`|0.018 (per 1000000 token)|1 token\n|**jina**|**jina-embeddings-v3**|`v1`|0.02 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`1536__text-embedding-ada-002`|\n|**google**|`text-multilingual-embedding-002`|\n|**cohere**|`4096__embed-english-v2.0`|\n|**mistral**|`1024__mistral-embed`|\n|**jina**|`jina-embeddings-v3`|\n\n
", "summary": "Embeddings", "tags": ["Embeddings"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textembeddingsEmbeddingsRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textembeddingsResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/emotion_detection/": {"post": {"operationId": "text_text_emotion_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**vernai**|`v1`|2.0 (per 1000 request)|1 request\n\n\n
\n\n", "summary": "Emotion Detection", "tags": ["Emotion Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textemotion_detectionEmotionDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textemotion_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/entity_sentiment/": {"post": {"operationId": "text_text_entity_sentiment_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 1.26.8`|1.0 (per 1000000 char)|300 char\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 
token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 
token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n|**Japanese**|`ja`|\n|**Spanish**|`es`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-1.5-flash`|\n\n
", "summary": "Entity Sentiment", "tags": ["Entity Sentiment"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textentity_sentimentEntitySentimentRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textentity_sentimentResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/keyword_extraction/": {"post": {"operationId": "text_text_keyword_extraction_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 seconde)|1 seconde\n|**tenstorrent**|-|`v1.0.0`|0.7 (per 1000000 char)|1000 char\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 
(per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Arabic**|`ar`|\n|**Bulgarian**|`bg`|\n|**Catalan**|`ca`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Modern Greek (1453-)**|`el`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n|**Turkish**|`tr`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Keyword Extraction", "tags": ["Keyword Extraction"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textkeyword_extractionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/moderation/": {"post": {"operationId": "text_text_moderation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**microsoft**|-|`v1.0`|1.0 (per 1000 request)|1 request\n|**openai**|-|`v3.0.0`|free|-\n|**openai**|**text-moderation-stable**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-moderation-007**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**openai**|**text-moderation-latest**|`v3.0.0`|0.0 (per 1 token)|1 token\n|**google**|-|`v1`|5.0 (per 1000000 char)|100 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Assamese**|`as`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Breton**|`br`|\n|**Bulgarian**|`bg`|\n|**Catalan**|`ca`|\n|**Central Kurdish**|`ckb`|\n|**Cherokee**|`chr`|\n|**Chinese**|`zh`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Fulah**|`ff`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Hausa**|`ha`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Konkani (macrolanguage)**|`kok`|\n|**Korean**|`ko`|\n|**Lao**|`lo`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Norwegian Nynorsk**|`nn`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Pedi**|`nso`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Quechua**|`qu`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Tigrinya**|`ti`|\n|**Tswana**|`tn`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Wolof**|`wo`|\n|**Xhosa**|`xh`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n\n

Supported Models

\n\n", "summary": "Moderation", "tags": ["Moderation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textmoderationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/named_entity_recognition/": {"post": {"operationId": "text_text_named_entity_recognition_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|10.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|10.0 (per 1000000 token)|1 token\n|**tenstorrent**|-|`v1.0.0`|1.0 (per 1000000 char)|1000 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Arabic**|`ar`|\n|**Chinese**|`zh`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hindi**|`hi`|\n|**Hungarian**|`hu`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Russian**|`ru`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n|**Turkish**|`tr`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Named Entity Recognition", "tags": ["Named Entity Recognition"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textnamed_entity_recognitionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/plagia_detection/": {"post": {"operationId": "text_text_plagia_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**winstonai**|`v2`|14.0 (per 1000000 char)|1 char\n\n\n
\n\n", "summary": "Plagia Detection", "tags": ["Plagia Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textplagia_detectionPlagiaDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textplagia_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/prompt_optimization/": {"post": {"operationId": "text_text_prompt_optimization_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 second)|1 second\n\n\n
\n\n

Supported Models

\n\n", "summary": "Prompt Optimization", "tags": ["Prompt Optimization"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textprompt_optimizationPromptOptimizationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textprompt_optimizationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/sentiment_analysis/": {"post": {"operationId": "text_text_sentiment_analysis_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**google**|**gemini-live-2.5-flash-preview-native-audio-09-2025**|`v1`|2e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-001**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-002**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-latest**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-latest**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-8b-exp-0924**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-flash-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-001**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-002**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0801**|`v1`|1.05e-05 (per 1 token)|1 token\n|**google**|**gemini-1.5-pro-exp-0827**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-001**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview-02-05**|`v1`|3e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-live-001**|`v1`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-preview-image-generation**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash-thinking-exp-01-21**|`v1`|0.0 (per 1 token)|1 
token\n|**google**|**gemini-2.0-pro-exp-02-05**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-09-2025**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-09-2025**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-latest**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-flash-lite-latest**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite-preview-06-17**|`v1`|4e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-04-17**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-05-20**|`v1`|2.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-preview-tts**|`v1`|6e-07 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-computer-use-preview-10-2025**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-exp-03-25**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-05-06**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-06-05**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-tts**|`v1`|1e-05 (per 1 token)|1 token\n|**google**|**gemini-exp-1114**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-exp-1206**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-27b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-gemma-2-9b-it**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro**|`v1`|1.05e-06 (per 1 token)|1 token\n|**google**|**gemini-pro-vision**|`v1`|1.05e-06 (per 1 token)|1 
token\n|**google**|**gemma-3-27b-it**|`v1`|0.0 (per 1 token)|1 token\n|**google**|**learnlm-1.5-pro-experimental**|`v1`|0.0 (per 1 token)|1 token\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|20.0 (per 1000000 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 seconde)|1 seconde\n|**tenstorrent**|-|`v1.1.0`|0.7 (per 1000000 char)|1000 char\n|**sapling**|-|`v1`|20.0 (per 1000000 char)|1000 char\n|**xai**|**grok-2-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v1`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v1`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v1`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-latest**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v1`|5e-07 (per 1 token)|1 
token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v1`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v1`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v1`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Arabic**|`ar`|\n|**Chinese**|`zh`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hindi**|`hi`|\n|**Indonesian**|`id`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Modern Greek (1453-)**|`el`|\n|**Norwegian**|`no`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Russian**|`ru`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Vietnamese**|`vi`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-1.5-flash`|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Sentiment Analysis", "tags": ["Sentiment Analysis"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textsentiment_analysisResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/spell_check/": {"post": {"operationId": "text_text_spell_check_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**prowritingaid**|`v2`|10.0 (per 1000 request)|1 request\n|**sapling**|`v1`|2.0 (per 1000000 char)|1 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n|**jp**|`jp`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Simplified)**|`zh-hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-hant`|\n|**English (United Kingdom)**|`en-gb`|\n|**Portuguese (Brazil)**|`pt-br`|\n|**Portuguese (Portugal)**|`pt-pt`|\n\n
", "summary": "Spell Check", "tags": ["Spell Check"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textspell_checkSpellCheckRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textspell_checkResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/summarize/": {"post": {"operationId": "text_text_summarize_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**microsoft**|-|`v3.1`|2.0 (per 1000000 char)|1000 char\n|**openai**|-|`v3.0.0`|60.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-1106**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0314**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-2024-04-09**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0125**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**chatgpt-4o-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0301**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-0613**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-3.5-turbo-16k-0613**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4-0125-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0314**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-0613**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-1106-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-32k**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-32k-0613**|`v3.0.0`|0.00012 (per 1 token)|1 token\n|**openai**|**gpt-4-turbo-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4-vision-preview**|`v3.0.0`|3e-05 (per 1 token)|1 token\n|**openai**|**gpt-4.1**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-2025-04-14**|`v3.0.0`|8e-06 (per 1 token)|1 
token\n|**openai**|**gpt-4.1-mini**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-mini-2025-04-14**|`v3.0.0`|1.6e-06 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.1-nano-2025-04-14**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4.5-preview-2025-02-27**|`v3.0.0`|0.00015 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-05-13**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-08-06**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-2024-11-20**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-10-01**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2024-12-17**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-audio-preview-2025-06-03**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-2024-07-18**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-audio-preview-2024-12-17**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-realtime-preview-2024-12-17**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-mini-search-preview-2025-03-11**|`v3.0.0`|6e-07 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-10-01**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2024-12-17**|`v3.0.0`|2e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-realtime-preview-2025-06-03**|`v3.0.0`|2e-05 (per 1 token)|1 
token\n|**openai**|**gpt-4o-search-preview**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-4o-search-preview-2025-03-11**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-2025-11-13**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.1-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-2025-12-11**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5.2-chat-latest**|`v3.0.0`|1.4e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-2025-08-07**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-chat-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**openai**|**gpt-5-mini**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-mini-2025-08-07**|`v3.0.0`|2e-06 (per 1 token)|1 token\n|**openai**|**gpt-5-nano**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-5-nano-2025-08-07**|`v3.0.0`|4e-07 (per 1 token)|1 token\n|**openai**|**gpt-realtime**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**gpt-realtime-mini**|`v3.0.0`|2.4e-06 (per 1 token)|1 token\n|**openai**|**gpt-realtime-2025-08-28**|`v3.0.0`|1.6e-05 (per 1 token)|1 token\n|**openai**|**o1**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-2024-12-17**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o1-mini-2024-09-12**|`v3.0.0`|1.2e-05 (per 1 token)|1 token\n|**openai**|**o1-preview**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o1-preview-2024-09-12**|`v3.0.0`|6e-05 (per 1 token)|1 token\n|**openai**|**o3**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-2025-04-16**|`v3.0.0`|8e-06 (per 1 token)|1 token\n|**openai**|**o3-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 
token\n|**openai**|**o3-mini-2025-01-31**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**o4-mini-2025-04-16**|`v3.0.0`|4.4e-06 (per 1 token)|1 token\n|**openai**|**container**|`v3.0.0`|0.0 (per 1 seconde)|1 seconde\n|**cohere**|-|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**summarize-xlarge**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-nightly**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**cohere**|**command-nightly**|`2022-12-06`|2.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-1212**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-1212**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-2-vision-latest**|`v3.0.0`|1e-05 (per 1 token)|1 token\n|**xai**|**grok-3**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-beta**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-beta**|`v3.0.0`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-fast-latest**|`v3.0.0`|2.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-3-mini**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-beta**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-beta**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-fast-latest**|`v3.0.0`|4e-06 (per 1 token)|1 token\n|**xai**|**grok-3-mini-latest**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-fast-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-fast-non-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-0709**|`v3.0.0`|1.5e-05 (per 1 token)|1 
token\n|**xai**|**grok-4-latest**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-reasoning-latest**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-4-1-fast-non-reasoning-latest**|`v3.0.0`|5e-07 (per 1 token)|1 token\n|**xai**|**grok-beta**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n|**xai**|**grok-code-fast**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-code-fast-1-0825**|`v3.0.0`|1.5e-06 (per 1 token)|1 token\n|**xai**|**grok-vision-beta**|`v3.0.0`|1.5e-05 (per 1 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Bulgarian**|`bg`|\n|**Chinese**|`zh`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**German**|`de`|\n|**Hungarian**|`hu`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Korean**|`ko`|\n|**Latvian**|`lv`|\n|**Modern Greek (1453-)**|`el`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Spanish**|`es`|\n|**Swedish**|`sv`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Brazil)**|`pt-br`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Portuguese (Portugal)**|`pt-pt`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4`|\n|**cohere**|`summarize-xlarge`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Summarize", "tags": ["Summarize"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textsummarizeSummarizeRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/textsummarizeResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/text/topic_extraction/": {"post": {"operationId": "text_text_topic_extraction_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|-|`v1`|0.6 (per 1000000 char)|1 char\n|**openai**|**gpt-4o**|`v1`|10.0 (per 1000000 token)|1 token\n|**openai**|-|`v1`|10.0 (per 1000000 token)|1 token\n|**tenstorrent**|-|`v1.0.0`|2.0 (per 1000000 char)|1000 char\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**English**|`en`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Topic Extraction", "tags": ["Topic Extraction"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/texttopic_extractionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/translation/automatic_translation/": {"post": {"operationId": "translation_translation_automatic_translation_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|15.0 (per 1000000 char)|1 char\n|**google**|-|`v3`|20.0 (per 1000000 char)|1 char\n|**microsoft**|-|`v3.0`|10.0 (per 1000000 char)|1 char\n|**deepl**|-|`v2`|20.0 (per 1000000 char)|1 char\n|**modernmt**|-|`1.2.8`|8.0 (per 1000000 char)|1 char\n|**openai**|-|`v1`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1`|20.0 (per 1000000 token)|1 token\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Assamese**|`as`|\n|**Azerbaijani**|`az`|\n|**Bashkir**|`ba`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dari**|`prs`|\n|**Dhivehi**|`dv`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Faroese**|`fo`|\n|**Fijian**|`fj`|\n|**Filipino**|`fil`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hmong Daw**|`mww`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Inuinnaqtun**|`ikt`|\n|**Inuktitut**|`iu`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Klingon**|`tlh`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Literary Chinese**|`lzh`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Northern Kurdish**|`kmr`|\n|**Norwegian**|`no`|\n|**Norwegian Bokm\u00e5l**|`nb`|\n|**Norwegian Nynorsk**|`nn`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Quer\u00e9taro 
Otomi**|`otq`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili (macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tahitian**|`ty`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Tibetan**|`bo`|\n|**Tigrinya**|`ti`|\n|**Tonga (Tonga Islands)**|`to`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Upper Sorbian**|`hsb`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Yucateco**|`yua`|\n|**Yue Chinese**|`yue`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Simplified)**|`zh-Hans`|\n|**Chinese (Taiwan)**|`zh-TW`|\n|**Chinese (Traditional)**|`zh-Hant`|\n|**English (United Kingdom)**|`en-GB`|\n|**English (United States)**|`en-US`|\n|**French (Canada)**|`fr-CA`|\n|**Inuktitut (Latin)**|`iu-Latn`|\n|**Klingon (Klingon (KLI pIqaD))**|`tlh-Piqd`|\n|**Klingon (Latin)**|`tlh-Latn`|\n|**Mongolian (Cyrillic)**|`mn-Cyrl`|\n|**Mongolian (Mongolian)**|`mn-Mong`|\n|**Persian (Afghanistan)**|`fa-AF`|\n|**Portuguese (Brazil)**|`pt-BR`|\n|**Portuguese (Portugal)**|`pt-PT`|\n|**Serbian (Cyrillic)**|`sr-Cyrl`|\n|**Serbian (Latin)**|`sr-Latn`|\n|**Spanish (Latin America)**|`es-419`|\n|**Spanish (Mexico)**|`es-MX`|\n|**Spanish (Spain)**|`es-ES`|\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n\n
", "summary": "Automatic Translation", "tags": ["Automatic Translation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationautomatic_translationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/translation/document_translation/": {"post": {"operationId": "translation_translation_document_translation_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**deepl**|`v2`|2.0 (per 20 page)|20 page\n|**google**|`v3`|0.08 (per 1 page)|1 page\n\n\n
\n\n
Supported Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Afrikaans**|`af`|\n|**Albanian**|`sq`|\n|**Amharic**|`am`|\n|**Arabic**|`ar`|\n|**Armenian**|`hy`|\n|**Azerbaijani**|`az`|\n|**Basque**|`eu`|\n|**Belarusian**|`be`|\n|**Bengali**|`bn`|\n|**Bosnian**|`bs`|\n|**Bulgarian**|`bg`|\n|**Burmese**|`my`|\n|**Catalan**|`ca`|\n|**Cebuano**|`ceb`|\n|**Chinese**|`zh`|\n|**Corsican**|`co`|\n|**Croatian**|`hr`|\n|**Czech**|`cs`|\n|**Danish**|`da`|\n|**Dutch**|`nl`|\n|**English**|`en`|\n|**Esperanto**|`eo`|\n|**Estonian**|`et`|\n|**Finnish**|`fi`|\n|**French**|`fr`|\n|**Galician**|`gl`|\n|**Georgian**|`ka`|\n|**German**|`de`|\n|**Gujarati**|`gu`|\n|**Haitian**|`ht`|\n|**Hausa**|`ha`|\n|**Hawaiian**|`haw`|\n|**Hebrew**|`he`|\n|**Hindi**|`hi`|\n|**Hmong**|`hmn`|\n|**Hungarian**|`hu`|\n|**Icelandic**|`is`|\n|**Igbo**|`ig`|\n|**Indonesian**|`id`|\n|**Irish**|`ga`|\n|**Italian**|`it`|\n|**Japanese**|`ja`|\n|**Javanese**|`jv`|\n|**Kannada**|`kn`|\n|**Kazakh**|`kk`|\n|**Khmer**|`km`|\n|**Kinyarwanda**|`rw`|\n|**Kirghiz**|`ky`|\n|**Korean**|`ko`|\n|**Kurdish**|`ku`|\n|**Lao**|`lo`|\n|**Latin**|`la`|\n|**Latvian**|`lv`|\n|**Lithuanian**|`lt`|\n|**Luxembourgish**|`lb`|\n|**Macedonian**|`mk`|\n|**Malagasy**|`mg`|\n|**Malay (macrolanguage)**|`ms`|\n|**Malayalam**|`ml`|\n|**Maltese**|`mt`|\n|**Maori**|`mi`|\n|**Marathi**|`mr`|\n|**Modern Greek (1453-)**|`el`|\n|**Mongolian**|`mn`|\n|**Nepali (macrolanguage)**|`ne`|\n|**Norwegian**|`no`|\n|**Nyanja**|`ny`|\n|**Oriya (macrolanguage)**|`or`|\n|**Panjabi**|`pa`|\n|**Persian**|`fa`|\n|**Polish**|`pl`|\n|**Portuguese**|`pt`|\n|**Pushto**|`ps`|\n|**Romanian**|`ro`|\n|**Russian**|`ru`|\n|**Samoan**|`sm`|\n|**Scottish Gaelic**|`gd`|\n|**Serbian**|`sr`|\n|**Shona**|`sn`|\n|**Sindhi**|`sd`|\n|**Sinhala**|`si`|\n|**Slovak**|`sk`|\n|**Slovenian**|`sl`|\n|**Somali**|`so`|\n|**Southern Sotho**|`st`|\n|**Spanish**|`es`|\n|**Sundanese**|`su`|\n|**Swahili 
(macrolanguage)**|`sw`|\n|**Swedish**|`sv`|\n|**Tagalog**|`tl`|\n|**Tajik**|`tg`|\n|**Tamil**|`ta`|\n|**Tatar**|`tt`|\n|**Telugu**|`te`|\n|**Thai**|`th`|\n|**Turkish**|`tr`|\n|**Turkmen**|`tk`|\n|**Uighur**|`ug`|\n|**Ukrainian**|`uk`|\n|**Urdu**|`ur`|\n|**Uzbek**|`uz`|\n|**Vietnamese**|`vi`|\n|**Welsh**|`cy`|\n|**Western Frisian**|`fy`|\n|**Xhosa**|`xh`|\n|**Yiddish**|`yi`|\n|**Yoruba**|`yo`|\n|**Zulu**|`zu`|\n\n
Supported Detailed Languages\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**Auto detection**|`auto-detect`|\n|**Chinese (China)**|`zh-CN`|\n|**Chinese (Taiwan)**|`zh-TW`|\n\n
", "summary": "Document Translation", "tags": ["Document Translation"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationdocument_translationResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/translation/language_detection/": {"post": {"operationId": "translation_translation_language_detection_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|-|`boto3 (v1.15.18)`|1.0 (per 1000000 char)|300 char\n|**google**|-|`v1`|20.0 (per 1000000 char)|1 char\n|**microsoft**|-|`v3.1`|1.0 (per 1000000 char)|1000 char\n|**modernmt**|-|`1.1.0`|8.0 (per 1000000 char)|1 char\n|**openai**|-|`v1`|20.0 (per 1000000 token)|1 token\n|**openai**|**gpt-4o**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2-latest**|`v1`|10.0 (per 1000000 token)|1 token\n|**xai**|**grok-2**|`v1`|10.0 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**openai**|`gpt-4o`|\n|**xai**|`grok-2-latest`|\n\n
", "summary": "Language Detection", "tags": ["Language Detection"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/translationlanguage_detectionResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/user/custom_token/": {"get": {"operationId": "user_user_custom_token_list", "summary": "List Tokens", "tags": ["User Management"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/CustomTokensList"}}}}, "description": ""}}}, "post": {"operationId": "user_user_custom_token_create", "summary": "Create new Token", "tags": ["User Management"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokensCreateRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokensCreate"}}}, "description": ""}}}}, "/user/custom_token/{name}/": {"get": {"operationId": "user_user_custom_token_retrieve", "summary": "Retrieve Token", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["User Management"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokensList"}}}, "description": ""}}}, "patch": {"operationId": "user_user_custom_token_partial_update", "summary": "Update Token", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["User Management"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedCustomTokenUpdateRequest"}}}}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/CustomTokenUpdate"}}}, "description": ""}}}, "delete": {"operationId": "user_user_custom_token_destroy", "summary": "Delete Token", "parameters": [{"in": "path", "name": "name", "schema": {"type": "string"}, "required": true}], "tags": ["User Management"], "security": [{"FeatureApiAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/video/deepfake_detection_async/": {"get": {"operationId": "video_video_deepfake_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Deepfake Detection List Jobs", "tags": ["Deepfake Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_deepfake_detection_async_create", "description": "No Provider Available\n\n\n\n", "summary": "Deepfake Detection Launch Job", "tags": ["Deepfake Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_deepfake_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Deepfake Detection delete Jobs", "tags": ["Deepfake Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/deepfake_detection_async/{public_id}/": {"get": {"operationId": "video_video_deepfake_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Deepfake Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": 
"boolean", "default": false}}], "tags": ["Deepfake Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideodeepfake_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/explicit_content_detection_async/": {"get": {"operationId": "video_video_explicit_content_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Explicit Content Detection List Jobs", "tags": ["Explicit Content Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_explicit_content_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 second)|60 second\n|**google**|`Video intelligence v1`|0.1 (per 60 second)|1 second\n\n\n
\n\n", "summary": "Video Explicit Content Detection Launch Job", "tags": ["Explicit Content Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_explicit_content_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Explicit Content delete Jobs", "tags": ["Explicit Content Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/explicit_content_detection_async/{public_id}/": {"get": {"operationId": "video_video_explicit_content_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Explicit Content Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Explicit Content Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoexplicit_content_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, 
"description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/face_detection_async/": {"get": {"operationId": "video_video_face_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Face Detection List Jobs", "tags": ["Face Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_face_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 second)|60 second\n|**google**|`Video intelligence v1`|0.1 (per 60 second)|1 second\n\n\n
\n\n", "summary": "Face Detection Launch Job", "tags": ["Face Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_face_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Face Detection delete Jobs", "tags": ["Face Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/face_detection_async/{public_id}/": {"get": {"operationId": "video_video_face_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Face Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Face Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoface_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, 
"403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/generation_async/": {"get": {"operationId": "video_video_generation_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Generation List Jobs", "tags": ["Generation Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_generation_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**amazon**|**amazon.nova-reel-v1:1**|`boto3`|0.08 (per 1 seconde)|1 seconde\n|**amazon**|**amazon.nova-reel-v1:0**|`boto3`|0.08 (per 1 seconde)|1 seconde\n|**amazon**|-|`boto3`|0.5 (per 1 request)|1 request\n|**minimax**|**MiniMax-Hailuo-2.3**|`v1`|0.28 (per 1 request)|1 request\n|**minimax**|**MiniMax-Hailuo-02**|`v1`|0.28 (per 1 request)|1 request\n|**minimax**|-|`v1`|0.56 (per 1 request)|1 request\n|**minimax**|**S2V-01**|`v1`|0.65 (per 1 request)|1 request\n|**minimax**|**T2V/I2V-01-Director**|`v1`|0.43 (per 1 request)|1 request\n|**bytedance**|**seedance-1-0-pro-250528**|`v3`|0.62 (per 1 request)|1 request\n|**bytedance**|-|`v3`|1.8 (per 1000000 token)|1 token\n|**bytedance**|**seedance-1-0-lite-t2v-250428**|`v3`|1.8 (per 1000000 token)|1 token\n|**google**|**veo-3.1-generate-preview**|`v1Beta`|0.4 (per 1 seconde)|1 seconde\n|**google**|**veo-3.0-generate-001**|`v1Beta`|0.4 (per 1 seconde)|1 seconde\n|**google**|**veo-3.0-fast-generate-001**|`v1Beta`|0.15 (per 1 seconde)|1 seconde\n|**google**|-|`v1Beta`|3.6 (per 1 request)|1 request\n|**openai**|-|`v1`|5.0 (per 1 request)|1 request\n|**openai**|**sora-2**|`v1`|0.1 (per 1 seconde)|1 seconde\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**amazon**|`amazon.nova-reel-v1:1`|\n|**minimax**|`MiniMax-Hailuo-2.3`|\n|**bytedance**|`seedance-1-0-pro-250528`|\n|**google**|`veo-3.1-generate-preview`|\n|**openai**|`sora-2`|\n\n
", "summary": "Generation Launch Job", "tags": ["Generation Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/GenerationAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/GenerationAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_generation_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Generation delete Jobs", "tags": ["Generation Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/generation_async/{public_id}/": {"get": {"operationId": "video_video_generation_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Generation Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Generation Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideogeneration_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": 
{"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/label_detection_async/": {"get": {"operationId": "video_video_label_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Label Detection List Jobs", "tags": ["Label Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_label_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 second)|60 second\n|**google**|`Video intelligence v1`|0.1 (per 60 second)|1 second\n\n\n
\n\n", "summary": "Label Detection Launch Job", "tags": ["Label Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_label_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Label Detection delete Jobs", "tags": ["Label Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/label_detection_async/{public_id}/": {"get": {"operationId": "video_video_label_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Label Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Label Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideolabel_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/logo_detection_async/": {"get": {"operationId": "video_video_logo_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Logo Detection List Jobs", "tags": ["Logo Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_logo_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`Video intelligence v1`|0.15 (per 60 second)|1 second\n|**twelvelabs**|`v1.1.2`|0.1 (per 60 second)|60 second\n\n\n
\n\n", "summary": "Video Logo Detection Launch Job", "tags": ["Logo Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_logo_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Logo Detection delete Jobs", "tags": ["Logo Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/logo_detection_async/{public_id}/": {"get": {"operationId": "video_video_logo_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Logo Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Logo Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideologo_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/object_tracking_async/": {"get": {"operationId": "video_video_object_tracking_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Object Tracking List Jobs", "tags": ["Object Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_object_tracking_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`Video intelligence v1`|0.15 (per 60 second)|1 second\n\n\n
\n\n", "summary": "Video Object Tracking Launch Job", "tags": ["Object Tracking Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_object_tracking_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Object Tracking delete Jobs", "tags": ["Object Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/object_tracking_async/{public_id}/": {"get": {"operationId": "video_video_object_tracking_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Object Tracking Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Object Tracking Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoobject_tracking_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": 
"#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/person_tracking_async/": {"get": {"operationId": "video_video_person_tracking_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Person Tracking List Jobs", "tags": ["Person Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_person_tracking_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 second)|60 second\n|**google**|`Video intelligence v1`|0.1 (per 60 second)|1 second\n\n\n
\n\n", "summary": "Person Tracking Launch Job", "tags": ["Person Tracking Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_person_tracking_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Person Tracking delete Jobs", "tags": ["Person Tracking Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/person_tracking_async/{public_id}/": {"get": {"operationId": "video_video_person_tracking_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Person Tracking Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Person Tracking Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoperson_tracking_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/question_answer/": {"post": {"operationId": "video_video_question_answer_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|**gemini-3-flash-preview**|`v1Beta`|3e-06 (per 1 token)|1 token\n|**google**|**gemini-3-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|-|`v1Beta`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview**|`v1Beta`|0.3 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1Beta`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-flash-lite-preview**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1Beta`|0.3 (per 1000000 token)|1 token\n|**amazon**|-|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n|**amazon**|**amazon.nova-pro-v1:0**|`boto3 (v1.29.6)`|3.2 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-2.5-flash`|\n|**amazon**|`amazon.nova-pro-v1:0`|\n\n
", "summary": "Question Answer", "tags": ["Question Answer"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/videoquestion_answerResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/question_answer_async/": {"get": {"operationId": "video_video_question_answer_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Question Answer List Jobs", "tags": ["Question Answer Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_question_answer_async_create", "description": "
Available Providers\n\n\n\n|Provider|Model|Version|Price|Billing unit|\n|----|----|-------|-----|------------|\n|**google**|**gemini-3-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-3-flash-preview**|`v1Beta`|3e-06 (per 1 token)|1 token\n|**google**|-|`v1Beta`|0.6 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite-preview**|`v1Beta`|0.3 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash-lite**|`v1Beta`|0.4 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-pro-preview**|`v1Beta`|1.2e-05 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.5-flash**|`v1Beta`|2.5 (per 1000000 token)|1 token\n|**google**|**gemini-3.1-flash-lite-preview**|`v1Beta`|1.5e-06 (per 1 token)|1 token\n|**google**|**gemini-2.5-pro-preview-03-25**|`v1Beta`|15.0 (per 1000000 token)|1 token\n|**google**|**gemini-2.0-flash-lite**|`v1Beta`|0.3 (per 1000000 token)|1 token\n\n\n
\n\n

Supported Models

\n\n
Default Models\n\n\n\n\n\n|Name|Value|\n|----|-----|\n|**google**|`gemini-2.5-flash`|\n\n
", "summary": "Question Answer Launch Job", "tags": ["Question Answer Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/QuestionAnswerAsyncRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/QuestionAnswerAsyncRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_question_answer_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Question Answer delete Jobs", "tags": ["Question Answer Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/question_answer_async/{public_id}/": {"get": {"operationId": "video_video_question_answer_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Question Answer Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Question Answer Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoquestion_answer_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, 
"description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/shot_change_detection_async/": {"get": {"operationId": "video_video_shot_change_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Video Shot Change Detection List Jobs", "tags": ["Shot Change Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_shot_change_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**google**|`Video intelligence v1`|0.05 (per 1 minute)|1 minute\n\n\n
\n\n", "summary": "Shot Change Detection Launch Job", "tags": ["Shot Change Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_shot_change_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Video Shot Change Detection delete Jobs", "tags": ["Shot Change Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/shot_change_detection_async/{public_id}/": {"get": {"operationId": "video_video_shot_change_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Video Shot Change Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Shot Change Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideoshot_change_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": 
{"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/video/text_detection_async/": {"get": {"operationId": "video_video_text_detection_async_retrieve", "description": "Get a list of all jobs launched for this feature. You'll then be able to use the ID of each one to get its status and results.
\n Please note that a **job status doesn't get updated until a get request** is sent.", "summary": "Text Detection List Jobs", "tags": ["Text Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ListAsyncJobResponse"}}}, "description": ""}}}, "post": {"operationId": "video_video_text_detection_async_create", "description": "
Available Providers\n\n\n\n|Provider|Version|Price|Billing unit|\n|----|-------|-----|------------|\n|**amazon**|`boto3`|0.1 (per 60 second)|60 second\n|**google**|`Video intelligence v1`|0.15 (per 60 second)|1 second\n|**twelvelabs**|`v1.1.2`|0.067 (per 60 second)|60 second\n\n\n
\n\n", "summary": "Text Detection Launch Job", "tags": ["Text Detection Async"], "requestBody": {"content": {"multipart/form-data": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}, "application/json": {"schema": {"$ref": "#/components/schemas/AsyncVideoAnalysisRequest"}}}, "required": true}, "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/LaunchAsyncJobResponse"}}}, "description": ""}}}, "delete": {"operationId": "video_video_text_detection_async_destroy", "description": "Generic class to handle method GET all async job for user\n\nAttributes:\n feature (str): EdenAI feature\n subfeature (str): EdenAI subfeature", "summary": "Text Detection delete Jobs", "tags": ["Text Detection Async"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"204": {"description": "No response body"}}}}, "/video/text_detection_async/{public_id}/": {"get": {"operationId": "video_video_text_detection_async_retrieve_2", "description": "Get the status and results of an async job given its ID.", "summary": "Text Detection Get Job Results", "parameters": [{"in": "path", "name": "public_id", "schema": {"type": "string"}, "required": true}, {"in": "query", "name": "response_as_dict", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_base_64", "schema": {"type": "boolean", "default": true}}, {"in": "query", "name": "show_original_response", "schema": {"type": "boolean", "default": false}}], "tags": ["Text Detection Async"], "security": [{"FeatureApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/asyncvideotext_detection_asyncResponseModel"}}}, "description": ""}, "400": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/BadRequest"}}}, "description": ""}, "500": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, 
"403": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Error"}}}, "description": ""}, "404": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/NotFoundResponse"}}}, "description": ""}}}}, "/workflow/": {"get": {"operationId": "workflow_workflow_list", "description": "List all created workflows. \n\nNOTE: you can only create a workflow through our app", "summary": "List Workflows", "tags": ["Workflows"], "security": [{"FeatureApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"type": "array", "items": {"$ref": "#/components/schemas/Workflow"}}}}, "description": ""}}}}, "/workflow/{workflow_id}/": {"get": {"operationId": "workflow_workflow_retrieve", "description": "Get workflow Content (JSON representation)", "summary": "Retrieve a Workflow", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Workflows"], "security": [{"WorkflowShareApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/Workflow"}}}, "description": ""}}}, "delete": {"operationId": "workflow_workflow_destroy", "summary": "Delete a Workflow", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Workflows"], "security": [{"WorkflowShareApiAuth": []}], "responses": {"204": {"description": "No response body"}}}}, "/workflow/{workflow_id}/execution/": {"get": {"operationId": "workflow_workflow_execution_list", "summary": "List Executions", "parameters": [{"name": "page", "required": false, "in": "query", "description": "A page number within the paginated result set.", "schema": {"type": "integer"}}, {"name": "page_size", "required": false, "in": "query", "description": "Number of results to return per page.", "schema": {"type": "integer"}}, {"in": "path", "name": "workflow_id", "schema": {"type": "string", 
"format": "uuid"}, "required": true}], "tags": ["Executions"], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PaginatedExecutionListList"}}}, "description": ""}}}, "post": {"operationId": "workflow_workflow_execution_create", "description": "\nLaunch a workflow Execution.\nif you set `input` in your workflow, you will need to launch the execution with these inputs as parameters\nFor example if you set a file_url as input with name \"my_invoice\", your request body will look like this:\n\n { my_invoice: \"https://example.com/public/invoice_123.pdf\" }\n ", "summary": "Create an Execution", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Executions"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ExecutionFakeCreateRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/ExecutionFakeCreateRequest"}}}}, "responses": {"201": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ExecutionExampleSuccessCreate"}}}, "description": ""}}}}, "/workflow/{workflow_id}/execution/{execution_id}/": {"get": {"operationId": "workflow_workflow_execution_retrieve", "description": "An Execution represents the process of running a workflow with specific input data. It tracks the workflow's progress, providing a unique id, a status (e.g., pending, running, completed), and a result once the execution is finished. 
You can retrieve an Execution by its id to monitor its status or access its result after completion", "summary": "Get an Execution by ID", "parameters": [{"in": "path", "name": "execution_id", "schema": {"type": "string", "format": "uuid"}, "required": true}, {"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Executions"], "security": [{"WorkflowShareApiAuth": []}, {}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/ExecutionExampleSuccessCreate"}}}, "description": ""}}}}, "/workflow/{workflow_id}/webhook/": {"put": {"operationId": "workflow_workflow_webhook_update", "summary": "Update workflow webhook URL", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Webhook"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/WebhookParametersRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/WebhookParametersRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/WebhookParametersRequest"}}}}, "security": [{"WorkflowShareApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/WebhookParameters"}}}, "description": ""}}}, "patch": {"operationId": "workflow_workflow_webhook_partial_update", "summary": "Update workflow webhook URL", "parameters": [{"in": "path", "name": "workflow_id", "schema": {"type": "string", "format": "uuid"}, "required": true}], "tags": ["Webhook"], "requestBody": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/PatchedWebhookParametersRequest"}}, "application/x-www-form-urlencoded": {"schema": {"$ref": "#/components/schemas/PatchedWebhookParametersRequest"}}, "multipart/form-data": {"schema": {"$ref": "#/components/schemas/PatchedWebhookParametersRequest"}}}}, "security": 
[{"WorkflowShareApiAuth": []}], "responses": {"200": {"content": {"application/json": {"schema": {"$ref": "#/components/schemas/WebhookParameters"}}}, "description": ""}}}}}, "components": {"schemas": {"AIProject": {"type": "object", "properties": {"project_id": {"type": "string", "format": "uuid", "readOnly": true}, "project_name": {"type": "string", "maxLength": 100}, "project_type": {"$ref": "#/components/schemas/ProjectTypeEnum"}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "user": {"type": "string", "format": "uuid"}}, "required": ["created_at", "project_id", "project_name", "project_type", "user"]}, "AddFileRequest": {"type": "object", "properties": {"data_type": {"$ref": "#/components/schemas/DataTypeEnum"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:pdf|csv|amr|flac|wav|ogg|mp3|mp4|webm|xml)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "metadata": {"type": "string", "minLength": 1, "description": "Optional parameter: Attach metadata to the uploaded file data in your database. Provide a stringified JSON with key-value pairs. Useful in `filter_document` when querying the language model, it allows you to filter data with your Chatbot by considering only documents that have the specified metadata."}, "provider": {"type": "string", "nullable": true, "minLength": 1, "description": "Select a provider to use, only for audio (speech-to-text) & pdf (ocr-async) files."}}, "required": ["data_type"]}, "AddImageRequest": {"type": "object", "properties": {"metadata": {"type": "string", "minLength": 1, "description": "Optional parameter: Attach metadata to the uploaded data in your database. Provide a stringified JSON with key-value pairs. Useful in `filter_document` when querying the language model, it allows you to filter data with your Chatbot by considering only documents that have the specified metadata. Key 'collection_name' is not allowed."}, "model": {"type": "string", "nullable": true, "minLength": 1, "default": "gemini-2.0-flash", "description": "Optional. Select the Language Model provider (e.g., `google/gemini-2.0-flash`, `openai/gpt-4-vision-preview`) to use for describing the uploaded content. This model will analyze the content and generate a text description, which will be stored in the RAG database. If left blank, a default model is used."}, "prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Optional. Specify a custom prompt to guide the Language Model in generating the description for the content. If no prompt is provided, a default prompt will be used to describe what's in the image/video. 
This allows for more specific or targeted analysis."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|png|webp|heic|heif)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}}, "AddTextRequest": {"type": "object", "properties": {"texts": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "LLM Query"}, "metadata": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "default": []}}, "required": ["texts"]}, "AddUrlRequest": {"type": "object", "properties": {"urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1}, "description": "Add multiple urls into the database, it loads all the text from HTML webpages into a document format."}, "js_render": {"type": "array", "items": {"type": "boolean"}, "description": "Enable JavaScript rendering for the provided URLs."}, "metadata": {"type": "array", "items": {"type": "object", "additionalProperties": {}}}}, "required": ["urls"]}, "AddVideoRequest": {"type": "object", "properties": {"metadata": {"type": "string", "minLength": 1, "description": "Optional parameter: Attach metadata to the uploaded data in your database. Provide a stringified JSON with key-value pairs. Useful in `filter_document` when querying the language model, it allows you to filter data with your Chatbot by considering only documents that have the specified metadata. Key 'collection_name' is not allowed."}, "model": {"type": "string", "nullable": true, "minLength": 1, "default": "gemini-2.0-flash", "description": "Optional. Select the Language Model provider (e.g., `google/gemini-2.0-flash`, `openai/gpt-4-vision-preview`) to use for describing the uploaded content. This model will analyze the content and generate a text description, which will be stored in the RAG database. 
If left blank, a default model is used."}, "prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Optional. Specify a custom prompt to guide the Language Model in generating the description for the content. If no prompt is provided, a default prompt will be used to describe what's in the image/video. This allows for more specific or targeted analysis."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mov|mp4|mpeg|avi|x\\-flv|mpg|webm|wmv|3gpp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}}, "AiDetectionItem": {"properties": {"text": {"title": "Text", "type": "string"}, "prediction": {"title": "Prediction", "type": "string"}, "ai_score": {"title": "Ai Score", "type": "integer"}, "ai_score_detail": {"title": "Ai Score Detail", "type": "integer"}}, "required": ["text", "prediction", "ai_score", "ai_score_detail"], "title": "AiDetectionItem", "type": "object"}, "AiProductFile": {"type": "object", "properties": {"file_id": {"type": "string", "format": "uuid", "readOnly": true}, "user": {"type": "string", "readOnly": true}, "project": {"type": "string", "readOnly": true}, "file_type": {"type": "string", "maxLength": 255}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "status": {"nullable": true, "oneOf": [{"$ref": "#/components/schemas/Status889Enum"}, {"$ref": "#/components/schemas/NullEnum"}]}, "error": {"type": "string", "nullable": true}, "task_id": {"type": "string", "nullable": true, "maxLength": 100}, "task_started_at": {"type": "string", "format": "date-time", "nullable": true}, "file_path": {"type": "string", "nullable": true}, "file_name": {"type": "string", "nullable": true, "maxLength": 1500}}, "required": ["created_at", "file_id", "file_type", "project", "user"]}, "AnonymizationAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "provider_params": {"type": "string", "description": "\nParameters specific to the provider that you want to send along the request.\n\nit should take a *provider* name as key and an object of parameters as value.\n\nExample:\n\n {\n \"deepgram\": {\n \"filler_words\": true,\n \"smart_format\": true,\n \"callback\": \"https://webhook.site/0000\"\n },\n \"assembly\": {\n \"webhook_url\": \"https://webhook.site/0000\"\n }\n }\n\nPlease refer to the documentation of each provider to see which parameters to send.\n"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}}, "required": ["providers"]}, "AnonymizationBoundingBox": {"properties": {"x_min": {"title": "X Min", "type": "integer"}, "x_max": {"title": "X Max", "type": "integer"}, "y_min": {"title": "Y Min", "type": "integer"}, "y_max": {"title": "Y Max", "type": "integer"}}, "required": ["x_min", "x_max", "y_min", "y_max"], "title": "AnonymizationBoundingBox", "type": "object"}, "AnonymizationEntity": {"description": "This model represents an entity extracted from the text.\n\nAttributes:\n offset (int): The offset of the entity in the text.\n length (int): The length of the entity in the text.\n category (CategoryType): The category of the entity.\n subcategory (SubCategoryType): The subcategory of the entity.\n original_label (str): The original label of the entity.\n content (str): The content of the entity.", "properties": {"offset": {"minimum": 0, "title": "Offset", "type": "integer"}, "length": {"exclusiveMinimum": true, "title": "Length", "type": "integer"}, "category": {"$ref": "#/components/schemas/CategoryType"}, "subcategory": {"$ref": "#/components/schemas/SubCategoryType"}, "original_label": {"minLength": 1, "title": "Original Label", "type": "string"}, "content": {"minLength": 1, "title": "Content", "type": "string"}, "confidence_score": {"maximum": 1.0, "minimum": 0.0, "title": "Confidence Score", "type": "integer"}}, "required": ["offset", "length", "category", "subcategory", "original_label", "content", "confidence_score"], "title": "AnonymizationEntity", "type": "object"}, "AnonymizationItem": {"properties": {"kind": {"title": "Kind", "type": "string"}, "confidence": {"title": "Confidence", "type": 
"integer"}, "bounding_boxes": {"$ref": "#/components/schemas/AnonymizationBoundingBox"}}, "required": ["kind", "confidence", "bounding_boxes"], "title": "AnonymizationItem", "type": "object"}, "AskLLMRequest": {"type": "object", "properties": {"query": {"type": "string", "minLength": 1, "description": "Enter your question or query about the data. The large language model (LLM) will provide a response."}, "llm_provider": {"type": "string", "minLength": 1, "description": "Select a provider for the large language model for processing. Leave empty for default."}, "llm_model": {"type": "string", "minLength": 1, "description": "Specify the model to use for language processing. Leave empty for default."}, "k": {"type": "integer", "minimum": 1, "default": 3, "description": "How many results chunk you want to return"}, "history": {"type": "array", "items": {"type": "object", "additionalProperties": {}, "description": "A dictionary representing a single conversation in the previous history. Must contain 'user' and 'assistant' fields."}, "default": [], "description": "A list containing all the previous conversations between the user and the chatbot AI. Each item in the list should be a dictionary with two keys: 'user' and 'assistant'."}, "chatbot_global_action": {"type": "string", "minLength": 1, "description": "A system message that helps set the behavior of the assistant."}, "filter_documents": {"type": "object", "additionalProperties": {}, "default": {}, "description": "Filter uploaded documents based on their metadata. Specify key-value pairs where the key represents the metadata field and the value is the desired metadata value. Please ensure that the provided metadata keys are available in your database."}, "min_score": {"type": "number", "format": "double", "default": 0.0, "description": "A minimum score threshold for the model to consider a chunk as a valid response. 
Higher values mean the model will be more conservative and only return chunks that are more similar to the query. Lower values mean the model will be more open to returning chunks that are less similar to the query."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 1024, "description": "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "conversation_id": {"type": "string", "format": "uuid"}, "use_reranking": {"type": "boolean", "default": false, "description": "Activate/deactivate the reranking. This is experimental."}, "top_n": {"type": "integer", "default": 3, "description": "The number of documents returned by the reranker (experimental)"}}, "required": ["query"]}, "AskYodaProjectUpdate": {"type": "object", "properties": {"ocr_provider": {"type": "string", "default": "amazon"}, "speech_to_text_provider": {"type": "string", "default": "openai"}, "llm_provider": {"type": "string", "description": "Select a default LLM provider to use in your project."}, "llm_model": {"type": "string", "description": "Select a default Model for LLM provider to use in your project"}, "chunk_size": {"type": "integer", "maximum": 10000, "minimum": 1, "nullable": true}, "chunk_separators": {"type": "array", "items": {"type": "string"}, "nullable": true}}}, "AskYourDataProjectRequest": {"type": "object", "properties": {"credential": {"type": "string", "nullable": true, "description": "The credential resource name", "maxLength": 255}, "asset": {"type": "string", "nullable": true, "description": "The asset sub_resource name", "maxLength": 255}, "ocr_provider": {"type": "string", "minLength": 1, "default": "amazon"}, 
"speech_to_text_provider": {"type": "string", "minLength": 1, "default": "openai"}, "llm_provider": {"type": "string", "minLength": 1, "description": "Select a default LLM provider to use in your project."}, "llm_model": {"type": "string", "minLength": 1, "description": "Select a default Model for LLM provider to use in your project"}, "chunk_size": {"type": "integer", "maximum": 10000, "minimum": 1, "nullable": true}, "chunk_separators": {"type": "array", "items": {"type": "string"}, "nullable": true}, "project_name": {"type": "string", "minLength": 1, "description": "Project name"}, "collection_name": {"type": "string", "minLength": 1, "description": "Database Collection Name"}, "db_provider": {"allOf": [{"$ref": "#/components/schemas/DbProviderEnum"}], "default": "qdrant", "description": "Database Provider\n\n* `qdrant` - qdrant\n* `supabase` - supabase"}, "embeddings_provider": {"allOf": [{"$ref": "#/components/schemas/EmbeddingsProviderEnum"}], "description": "Select an embedding provider to use in your search database. 
Leave empty for default.\n\n* `openai` - openai\n* `cohere` - cohere\n* `google` - google\n* `mistral` - mistral\n* `jina` - jina"}}, "required": ["collection_name", "embeddings_provider", "project_name"]}, "AssetCreate": {"type": "object", "properties": {"sub_resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AssetCreateRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AssetList": {"type": "object", "properties": {"sub_resource": {"type": "string", "maxLength": 255}, "data": {"type": "string", "format": "byte", "readOnly": true}}, "required": ["data", "sub_resource"]}, "AssetListRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}}, "required": ["sub_resource"]}, "AssetUpdate": {"type": "object", "properties": {"sub_resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AssetUpdateRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}, "required": ["data", "sub_resource"]}, "AsyncJobList": {"type": "object", "properties": {"providers": {"type": "string"}, "nb": {"type": "integer"}, "nb_ok": {"type": "integer"}, "public_id": {"type": "string", "format": "uuid"}, "state": {"$ref": "#/components/schemas/StateEnum"}, "created_at": {"type": "string", "format": "date-time"}}, "required": ["created_at", "nb", "nb_ok", "providers", "public_id", "state"]}, "AsyncOcrRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}}, "required": ["providers"]}, "AsyncVideoAnalysisRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mov|mp4|avi)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "AutomlClassificationListProjectsResponse": {"properties": {"projects": {"items": {"$ref": "#/components/schemas/AutomlClassificationProject"}, "title": "Projects", "type": "array"}}, "required": ["projects"], "title": "AutomlClassificationListProjectsResponse", "type": "object"}, "AutomlClassificationPredictRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["project_id", "providers"]}, "AutomlClassificationProject": {"properties": {"project_id": {"format": "uuid", "title": "Project Id", "type": "string"}, "name": {"title": "Name", "type": "string"}, "provider": {"title": "Provider", "type": "string"}}, "required": ["project_id", "name", "provider"], "title": "AutomlClassificationProject", "type": "object"}, "AutomlClassificationTrainRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}}, "required": ["project_id", "providers"]}, "AutomlClassificationUploadDataRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}, "label": {"type": "string", "minLength": 1, "description": "Label of the image", "maxLength": 200}, "type_of_data": {"allOf": [{"$ref": "#/components/schemas/TypeOfDataEnum"}], "description": "Type of data : TRAINING or TEST\n\n* `TRAINING` - TRAINING\n* `TEST` - TEST"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["label", "project_id", "providers", "type_of_data"]}, "BadRequest": {"type": "object", "properties": {"error": {"$ref": "#/components/schemas/NestedBadRequest"}}, "required": ["error"]}, "BatchLaunchFailedRequest": {"properties": {"name": {"description": "Request name, if any were given", "title": "Name", "type": "string"}, "public_id": {"description": "Request ID", "title": "Public Id", "type": "integer"}, "body": {"description": "Parameters passed to the request", "title": "Body", "type": "object"}, "errors": {"description": "Error received from the request validator", "title": "Errors", "type": "object"}}, "required": ["name", "public_id", "body", "errors"], "title": "BatchLaunchFailedRequest", "type": "object"}, "BatchLaunchResponse": {"properties": {"job_id": {"description": "Job ID/name", "title": "Job Id", "type": "string"}, "nb_launched": {"description": "Number of successfully launched requests", "title": "Nb Launched", "type": "integer"}, "nb_failed": {"description": "Number of failed_requests", "title": "Nb Failed", "type": "integer"}, "total": {"description": "Total number of requests sent", "title": "Total", "type": "integer"}, "failed_requests": {"description": "if any requests failed, they will be shown in this list", "items": {"$ref": "#/components/schemas/BatchLaunchFailedRequest"}, "title": "Failed Requests", "type": "array"}}, "required": ["job_id", "nb_launched", "nb_failed", "total", "failed_requests"], "title": "BatchLaunchResponse", "type": "object"}, "BatchList": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 1023}, "status": {"$ref": "#/components/schemas/Status889Enum"}, "feature": {"type": "string", "readOnly": true}, "subfeature": {"type": "string", 
"readOnly": true}, "total_requests": {"type": "integer", "readOnly": true}, "nb_processing": {"type": "integer", "readOnly": true}, "nb_succeeded": {"type": "integer", "readOnly": true}, "nb_failed": {"type": "integer", "readOnly": true}, "get_response_url": {"type": "string", "readOnly": true}}, "required": ["feature", "get_response_url", "nb_failed", "nb_processing", "nb_succeeded", "subfeature", "total_requests"]}, "BatchRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "requests": {"type": "array", "items": {"type": "object", "additionalProperties": {}}}}, "required": ["requests"]}, "BatchResponseRequest": {"type": "object", "properties": {"public_id": {"type": "integer", "readOnly": true}, "status": {"$ref": "#/components/schemas/Status889Enum"}, "name": {"type": "string", "nullable": true, "maxLength": 1023}, "errors": {"nullable": true}, "response": {"nullable": true}}, "required": ["public_id"]}, "BlankEnum": {"enum": [""]}, "BoundingBox": {"description": "Bounding box of a word in the image\n\nAttributes:\n left (float): Left coordinate of the bounding box\n top (float): Top coordinate of the bounding box\n width (float): Width of the bounding box\n height (float): Height of the bounding box\n text (str): Text detected in the bounding box\n\nConstructor:\n from_json (classmethod): Create a new instance of BoundingBox from a JSON object\n from_normalized_vertices (classmethod): Create a new instance of BoundingBox from normalized 
vertices\n unknown (classmethod): Return an invalid bounding_box with all fields filled with `-1`", "properties": {"left": {"description": "Left coordinate of the bounding box", "title": "Left", "type": "integer"}, "top": {"description": "Top coordinate of the bounding box", "title": "Top", "type": "integer"}, "width": {"description": "Width of the bounding box", "title": "Width", "type": "integer"}, "height": {"description": "Height of the bounding box", "title": "Height", "type": "integer"}}, "required": ["left", "top", "width", "height"], "title": "BoundingBox", "type": "object"}, "Bounding_box": {"properties": {"text": {"title": "Text", "type": "string"}, "left": {"title": "Left", "type": "integer"}, "top": {"title": "Top", "type": "integer"}, "width": {"title": "Width", "type": "integer"}, "height": {"title": "Height", "type": "integer"}}, "required": ["text", "left", "top", "width", "height"], "title": "Bounding_box", "type": "object"}, "BoundixBoxOCRTable": {"properties": {"left": {"title": "Left", "type": "integer"}, "top": {"title": "Top", "type": "integer"}, "width": {"title": "Width", "type": "integer"}, "height": {"title": "Height", "type": "integer"}}, "required": ["left", "top", "width", "height"], "title": "BoundixBoxOCRTable", "type": "object"}, "CategoryType": {"description": "This enum is used to categorize the explicit content extracted from the text", "enum": ["Toxic", "Content", "Sexual", "Violence", "DrugAndAlcohol", "Finance", "HateAndExtremism", "Safe", "Other"], "title": "CategoryType", "type": "string"}, "Cell": {"properties": {"text": {"title": "Text", "type": "string"}, "row_index": {"title": "Row Index", "type": "integer"}, "col_index": {"title": "Col Index", "type": "integer"}, "row_span": {"title": "Row Span", "type": "integer"}, "col_span": {"title": "Col Span", "type": "integer"}, "confidence": {"title": "Confidence", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/BoundixBoxOCRTable"}, "is_header": {"default": 
false, "title": "Is Header", "type": "boolean"}}, "required": ["text", "row_index", "col_index", "row_span", "col_span", "confidence", "bounding_box"], "title": "Cell", "type": "object"}, "ChatAvailableToolsRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The name of your tool/function"}, "description": {"type": "string"}, "parameters": {"description": "The tool's parameters are specified using a JSON Schema object. Detailed format documentation is available in the [JSON Schema reference](https://json-schema.org/understanding-json-schema/).\n\n**Make sure to well describe each parameter for best results.**\n\n\nExample for a weather tool:\n\n {\n \"type\": \"object\",\n \"properties\": {\n \"location\": {\n \"type\": \"string\"\n \"description\": \"The geographical location for which weather data is requested.\"\n },\n \"unit\": {\n \"type\": \"string\", \"enum\": [\"Celsius\", \"Fahrenheit\"]\n \"description\": \"The unit of measurement for temperature.\"\n }\n },\n \"required\": [\"location\"]\n }\n "}}}, "ChatCompletionChoice": {"properties": {"index": {"description": "The index of this completion choice", "title": "Index", "type": "integer"}, "message": {"allOf": [{"$ref": "#/components/schemas/ChatMessage"}], "description": "The chat completion message"}, "finish_reason": {"description": "The reason the completion finished: 'stop', 'length', 'tool_calls', 'content_filter', or 'function_call'", "title": "Finish Reason", "type": "string"}}, "required": ["index", "message", "finish_reason"], "title": "ChatCompletionChoice", "type": "object"}, "ChatCompletionUsage": {"properties": {"completion_tokens_details": {"allOf": [{"$ref": "#/components/schemas/UsageTokensDetails"}], "description": "Number of tokens in the generated completion"}, "prompt_tokens_details": {"allOf": [{"$ref": "#/components/schemas/UsageTokensDetails"}], "description": "Number of tokens in the prompt"}, "total_tokens": {"description": 
"Total number of tokens used (prompt + completion)", "title": "Total Tokens", "type": "integer"}}, "required": ["completion_tokens_details", "prompt_tokens_details", "total_tokens"], "title": "ChatCompletionUsage", "type": "object"}, "ChatMessage": {"properties": {"role": {"allOf": [{"$ref": "#/components/schemas/ChatRole"}], "description": "The role of the message author"}, "content": {"default": null, "description": "The content of the message", "title": "Content", "type": "string"}, "name": {"default": null, "description": "The name of the author of this message", "title": "Name", "type": "string"}, "function_call": {"default": null, "description": "The function call information", "title": "Function Call", "type": "object"}, "tool_calls": {"default": null, "description": "The tool call information", "items": {"type": "object"}, "title": "Tool Calls", "type": "array"}}, "required": ["role"], "title": "ChatMessage", "type": "object"}, "ChatMessageContent": {"properties": {"media_url": {"default": null, "title": "Media Url", "type": "string"}, "media_base64": {"default": null, "title": "Media Base64", "type": "string"}, "text": {"default": null, "title": "Text", "type": "string"}, "media_type": {"default": null, "title": "Media Type", "type": "string"}}, "title": "ChatMessageContent", "type": "object"}, "ChatMessageDataClass": {"properties": {"role": {"title": "Role", "type": "string"}, "content": {"items": {"$ref": "#/components/schemas/ChatMessage"}, "title": "Content", "type": "array"}}, "required": ["role"], "title": "ChatMessageDataClass", "type": "object"}, "ChatMessageRequest": {"type": "object", "properties": {"role": {"type": "string", "minLength": 1}, "message": {}, "tools": {"type": "array", "items": {"$ref": "#/components/schemas/ChatAvailableToolsRequest"}, "nullable": true}, "tool_calls": {"type": "array", "items": {"$ref": "#/components/schemas/ChatToolCallsRequest"}, "nullable": true}}, "required": ["message", "role"]}, "ChatRole": {"enum": 
["system", "user", "assistant", "function", "tool"], "title": "ChatRole", "type": "string"}, "ChatToolCallsRequest": {"type": "object", "properties": {"id": {"type": "string", "minLength": 1}, "name": {"type": "string", "minLength": 1}, "arguments": {"type": "string", "minLength": 1}}, "required": ["arguments", "id", "name"]}, "ChatToolResultRequest": {"type": "object", "properties": {"id": {"type": "string", "minLength": 1, "description": "the id of the `tool_call` used to generate result"}, "result": {"type": "string", "minLength": 1, "description": "the result of your function"}}, "required": ["id", "result"]}, "ContentNSFW": {"properties": {"timestamp": {"title": "Timestamp", "type": "integer"}, "confidence": {"title": "Confidence", "type": "integer"}, "category": {"title": "Category", "type": "string"}}, "required": ["timestamp", "confidence", "category"], "title": "ContentNSFW", "type": "object"}, "Conversation": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "name": {"type": "string", "nullable": true, "maxLength": 255}}, "required": ["id"]}, "ConversationDetail": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "name": {"type": "string", "nullable": true, "maxLength": 255}, "messages": {"type": "array", "items": {"$ref": "#/components/schemas/Message"}, "readOnly": true}}, "required": ["id", "messages"]}, "ConversationDetailRequest": {"type": "object", "properties": {"name": {"type": "string", "nullable": true, "maxLength": 255}}}, "ConversationRequest": {"type": "object", "properties": {"name": {"type": "string", "nullable": true, "maxLength": 255}}}, "CostMonitoringResponse": {"properties": {"response": {"items": {"$ref": "#/components/schemas/TokenData"}, "title": "Response", "type": "array"}}, "required": ["response"], "title": "CostMonitoringResponse", "type": "object"}, "Country": {"properties": {"name": {"title": "Name", "type": "string"}, "alpha2": 
{"title": "Alpha2", "type": "string"}, "alpha3": {"title": "Alpha3", "type": "string"}, "confidence": {"default": null, "title": "Confidence", "type": "integer"}}, "required": ["name", "alpha2", "alpha3"], "title": "Country", "type": "object"}, "CustomDocumentParsingAsyncBoundingBox": {"properties": {"left": {"title": "Left", "type": "integer"}, "top": {"title": "Top", "type": "integer"}, "width": {"title": "Width", "type": "integer"}, "height": {"title": "Height", "type": "integer"}}, "required": ["left", "top", "width", "height"], "title": "CustomDocumentParsingAsyncBoundingBox", "type": "object"}, "CustomDocumentParsingAsyncItem": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "value": {"title": "Value", "type": "string"}, "query": {"title": "Query", "type": "string"}, "bounding_box": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncBoundingBox"}, "page": {"title": "Page", "type": "integer"}}, "required": ["confidence", "value", "query", "bounding_box", "page"], "title": "CustomDocumentParsingAsyncItem", "type": "object"}, "CustomDocumentParsingAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "queries": {"type": "string", "minLength": 1, "description": "Your queries need to be a list of dict containing the questions you want answered and the page to look for the information in : '[{'query':'your query','pages':'your pages'},{'query':'your query','pages':'your pages'}]'"}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify whether to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers", "queries"]}, "CustomTokenUpdate": {"type": "object", "properties": {"name": {"type": "string", "readOnly": true, "description": "The token name"}, "token_type": {"allOf": [{"$ref": "#/components/schemas/TokenTypeEnum"}], "readOnly": true}, "balance": {"type": "number", "format": "double", "maximum": 100000, "minimum": -100000, "exclusiveMaximum": true, "exclusiveMinimum": true, "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Whether to use the balance field or not."}}, "required": ["name", "token_type"]}, "CustomTokensCreate": {"type": "object", "properties": {"name": {"type": "string", "description": "The token name", "maxLength": 200}, "token_type": {"$ref": "#/components/schemas/TokenTypeEnum"}, "balance": {"type": "string", "format": "decimal", "pattern": "^-?\\d{0,5}(?:\\.\\d{0,9})?$", "description": "Optional remaining credits balance for this 
Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Whether to use the balance field or not."}}, "required": ["name"]}, "CustomTokensCreateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The token name", "maxLength": 200}, "token_type": {"$ref": "#/components/schemas/TokenTypeEnum"}, "balance": {"type": "string", "format": "decimal", "pattern": "^-?\\d{0,5}(?:\\.\\d{0,9})?$", "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Whether to use the balance field or not."}}, "required": ["name"]}, "CustomTokensList": {"type": "object", "properties": {"name": {"type": "string", "description": "The token name", "maxLength": 200}, "token": {"type": "string", "nullable": true, "maxLength": 2000}, "token_type": {"$ref": "#/components/schemas/TokenTypeEnum"}, "balance": {"type": "number", "format": "double", "maximum": 100000, "minimum": -100000, "exclusiveMaximum": true, "exclusiveMinimum": true, "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "active_balance": {"type": "boolean", "description": "Whether to use the balance field or not."}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}}, "required": ["name"]}, "DataTypeEnum": {"enum": ["pdf", "audio", "csv", "xml"], "type": "string", "description": "* `pdf` - pdf\n* `audio` - audio\n* `csv` - csv\n* `xml` - xml"}, "DbProviderEnum": {"enum": ["qdrant", "supabase"], "type": "string", "description": "* `qdrant` - 
qdrant\n* `supabase` - supabase"}, "DetailPerFrame": {"properties": {"position": {"title": "Position", "type": "integer"}, "score": {"maximum": 1.0, "minimum": 0.0, "title": "Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/PredictionB20Enum"}], "title": "Prediction"}}, "required": ["position", "score", "prediction"], "title": "DetailPerFrame", "type": "object"}, "DetailTypeEnum": {"enum": ["resolution", "document_type"], "type": "string", "description": "* `resolution` - Resolution\n* `document_type` - Document Type"}, "Details": {"properties": {"total_cost": {"title": "Total Cost", "type": "integer"}, "details": {"title": "Details", "type": "integer"}, "cost_per_provider": {"additionalProperties": {"type": "integer"}, "title": "Cost Per Provider", "type": "object"}}, "required": ["total_cost", "details", "cost_per_provider"], "title": "Details", "type": "object"}, "DocumentTypeEnum": {"enum": ["auto-detect", "invoice", "receipt"], "type": "string", "description": "* `auto-detect` - auto-detect\n* `invoice` - invoice\n* `receipt` - receipt"}, "EmbeddingDataClass": {"properties": {"embedding": {"items": {"type": "integer"}, "title": "Embedding", "type": "array"}}, "required": ["embedding"], "title": "EmbeddingDataClass", "type": "object"}, "EmbeddingsProviderEnum": {"enum": ["openai", "cohere", "google", "mistral", "jina"], "type": "string", "description": "* `openai` - openai\n* `cohere` - cohere\n* `google` - google\n* `mistral` - mistral\n* `jina` - jina"}, "EmotionItem": {"description": "This class is used in EmotionAnalysisDataClass to list emotion analysed.\nArgs:\n - emotion (EmotionEnum): emotion of the text\n - emotion_score (float): score of the emotion", "properties": {"emotion": {"title": "Emotion", "type": "string"}, "emotion_score": {"maximum": 100.0, "minimum": 0.0, "title": "Emotion Score", "type": "integer"}}, "required": ["emotion", "emotion_score"], "title": "EmotionItem", "type": "object"}, "Entity": 
{"properties": {"type": {"description": "Recognized Entity type", "title": "Type", "type": "string"}, "text": {"description": "Text corresponding to the entity", "title": "Text", "type": "string"}, "sentiment": {"allOf": [{"$ref": "#/components/schemas/EntitySentimentEnum"}], "title": "Sentiment"}, "begin_offset": {"default": null, "title": "Begin Offset", "type": "integer"}, "end_offset": {"default": null, "title": "End Offset", "type": "integer"}}, "required": ["type", "text", "sentiment"], "title": "Entity", "type": "object"}, "EntitySentimentEnum": {"enum": ["Positive", "Negative", "Neutral", "Mixed"], "type": "string"}, "Error": {"type": "object", "properties": {"error": {"$ref": "#/components/schemas/NestedError"}}, "required": ["error"]}, "ExecutionContentCreate": {"type": "object", "properties": {"status": {"$ref": "#/components/schemas/ExecutionContentCreateStatusEnum"}, "results": {}, "last_node_executed": {"type": "string"}}, "required": ["last_node_executed", "results", "status"]}, "ExecutionContentCreateStatusEnum": {"enum": ["success", "failed", "processing"], "type": "string", "description": "* `success` - Success\n* `failed` - Failed\n* `processing` - Processing"}, "ExecutionExampleSuccessCreate": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. 
Use if your webhook receiver has a request size limit."}, "id": {"type": "string"}, "content": {"$ref": "#/components/schemas/ExecutionContentCreate"}, "created_at": {"type": "string", "format": "date-time"}, "updated_at": {"type": "string", "format": "date-time"}}, "required": ["content", "created_at", "id", "updated_at"]}, "ExecutionFakeCreateRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. 
Use if your webhook receiver has a request size limit."}, "my_invoice": {"type": "string", "minLength": 1, "default": "https://example.com/public/invoice_123.pdf", "description": "An example of an input parameter specified in the input node"}}}, "ExecutionList": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "status": {"type": "string", "nullable": true, "maxLength": 30}, "created_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}}, "required": ["created_at", "id", "updated_at"]}, "ExplicitItem": {"properties": {"label": {"description": "", "title": "Label", "type": "string"}, "likelihood": {"description": "", "title": "Likelihood", "type": "integer"}, "likelihood_score": {"description": "", "title": "Likelihood Score", "type": "integer"}, "category": {"allOf": [{"$ref": "#/components/schemas/CategoryType"}], "description": "The category of the detected content. Possible values include: 'Toxic', 'Content', 'Sexual', 'Violence', 'DrugAndAlcohol', 'Finance', 'HateAndExtremism', 'Safe', 'Other'."}, "subcategory": {"description": "The subcategory of content. 
Possible values:\n\nToxic Subcategories:\n- Insult\n- Obscene\n- Derogatory\n- Profanity\n- Threat\n- Toxic\n\nContent Subcategories:\n- MiddleFinger\n- PublicSafety\n- Health\n- Explicit\n- QRCode\n- Medical\n- Politics\n- Legal\n\nSexual Subcategories:\n- SexualActivity\n- SexualSituations\n- Nudity\n- PartialNudity\n- Suggestive\n- AdultToys\n- RevealingClothes\n- Sexual\n\nViolence Subcategories:\n- GraphicViolenceOrGore\n- PhysicalViolence\n- WeaponViolence\n- Violence\n\nDrug and Alcohol Subcategories:\n- DrugProducts\n- DrugUse\n- Tobacco\n- Smoking\n- Alcohol\n- Drinking\n\nFinance Subcategories:\n- Gambling\n- Finance\n- MoneyContent\n\nHate and Extremism Subcategories:\n- Hate\n- Harassment\n- Threatening\n- Extremist\n- Racy\n\nSafe Subcategories:\n- Safe\n- NotSafe\n\nOther Subcategories:\n- Spoof\n- Religion\n- Offensive\n- Other", "title": "Subcategory", "type": "string"}}, "required": ["label", "likelihood", "likelihood_score", "category", "subcategory"], "title": "ExplicitItem", "type": "object"}, "ExtractedTopic": {"properties": {"category": {"title": "Category", "type": "string"}, "importance": {"title": "Importance", "type": "integer"}}, "required": ["category", "importance"], "title": "ExtractedTopic", "type": "object"}, "FaceAccessories": {"properties": {"sunglasses": {"title": "Sunglasses", "type": "integer"}, "reading_glasses": {"title": "Reading Glasses", "type": "integer"}, "swimming_goggles": {"title": "Swimming Goggles", "type": "integer"}, "face_mask": {"title": "Face Mask", "type": "integer"}, "eyeglasses": {"title": "Eyeglasses", "type": "integer"}, "headwear": {"title": "Headwear", "type": "integer"}}, "required": ["sunglasses", "reading_glasses", "swimming_goggles", "face_mask", "eyeglasses", "headwear"], "title": "FaceAccessories", "type": "object"}, "FaceAttributes": {"properties": {"headwear": {"title": "Headwear", "type": "integer"}, "frontal_gaze": {"title": "Frontal Gaze", "type": "integer"}, "eyes_visible": {"title": "Eyes 
Visible", "type": "integer"}, "glasses": {"title": "Glasses", "type": "integer"}, "mouth_open": {"title": "Mouth Open", "type": "integer"}, "smiling": {"title": "Smiling", "type": "integer"}, "brightness": {"title": "Brightness", "type": "integer"}, "sharpness": {"title": "Sharpness", "type": "integer"}, "pose": {"$ref": "#/components/schemas/VideoFacePoses"}}, "required": ["headwear", "frontal_gaze", "eyes_visible", "glasses", "mouth_open", "smiling", "brightness", "sharpness", "pose"], "title": "FaceAttributes", "type": "object"}, "FaceBoundingBox": {"properties": {"x_min": {"title": "X Min", "type": "integer"}, "x_max": {"title": "X Max", "type": "integer"}, "y_min": {"title": "Y Min", "type": "integer"}, "y_max": {"title": "Y Max", "type": "integer"}}, "required": ["x_min", "x_max", "y_min", "y_max"], "title": "FaceBoundingBox", "type": "object"}, "FaceCompareBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "FaceCompareBoundingBox", "type": "object"}, "FaceEmotions": {"properties": {"joy": {"title": "Joy", "type": "integer"}, "sorrow": {"title": "Sorrow", "type": "integer"}, "anger": {"title": "Anger", "type": "integer"}, "surprise": {"title": "Surprise", "type": "integer"}, "disgust": {"title": "Disgust", "type": "integer"}, "fear": {"title": "Fear", "type": "integer"}, "confusion": {"title": "Confusion", "type": "integer"}, "calm": {"title": "Calm", "type": "integer"}, "unknown": {"title": "Unknown", "type": "integer"}, "neutral": {"title": "Neutral", "type": "integer"}, "contempt": {"title": "Contempt", "type": "integer"}}, "required": ["joy", "sorrow", "anger", "surprise", "disgust", "fear", "confusion", "calm", "unknown", "neutral", "contempt"], "title": "FaceEmotions", "type": "object"}, "FaceFacialHair": {"properties": 
{"moustache": {"title": "Moustache", "type": "integer"}, "beard": {"title": "Beard", "type": "integer"}, "sideburns": {"title": "Sideburns", "type": "integer"}}, "required": ["moustache", "beard", "sideburns"], "title": "FaceFacialHair", "type": "object"}, "FaceFeatures": {"properties": {"eyes_open": {"title": "Eyes Open", "type": "integer"}, "smile": {"title": "Smile", "type": "integer"}, "mouth_open": {"title": "Mouth Open", "type": "integer"}}, "required": ["eyes_open", "smile", "mouth_open"], "title": "FaceFeatures", "type": "object"}, "FaceHair": {"properties": {"hair_color": {"items": {"$ref": "#/components/schemas/FaceHairColor"}, "title": "Hair Color", "type": "array"}, "bald": {"title": "Bald", "type": "integer"}, "invisible": {"title": "Invisible", "type": "boolean"}}, "required": ["bald", "invisible"], "title": "FaceHair", "type": "object"}, "FaceHairColor": {"properties": {"color": {"title": "Color", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["color", "confidence"], "title": "FaceHairColor", "type": "object"}, "FaceItem": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "landmarks": {"$ref": "#/components/schemas/FaceLandmarks"}, "emotions": {"$ref": "#/components/schemas/FaceEmotions"}, "poses": {"$ref": "#/components/schemas/FacePoses"}, "age": {"title": "Age", "type": "integer"}, "gender": {"title": "Gender", "type": "string"}, "bounding_box": {"$ref": "#/components/schemas/FaceBoundingBox"}, "hair": {"$ref": "#/components/schemas/FaceHair"}, "facial_hair": {"$ref": "#/components/schemas/FaceFacialHair"}, "quality": {"$ref": "#/components/schemas/FaceQuality"}, "makeup": {"$ref": "#/components/schemas/FaceMakeup"}, "accessories": {"$ref": "#/components/schemas/FaceAccessories"}, "occlusions": {"$ref": "#/components/schemas/FaceOcclusions"}, "features": {"$ref": "#/components/schemas/FaceFeatures"}}, "required": ["confidence", "landmarks", "emotions", "poses", "age", "gender", 
"bounding_box", "hair", "facial_hair", "quality", "makeup", "accessories", "occlusions", "features"], "title": "FaceItem", "type": "object"}, "FaceLandmarks": {"properties": {"left_eye": {"items": {"type": "integer"}, "title": "Left Eye", "type": "array"}, "left_eye_top": {"items": {"type": "integer"}, "title": "Left Eye Top", "type": "array"}, "left_eye_right": {"items": {"type": "integer"}, "title": "Left Eye Right", "type": "array"}, "left_eye_bottom": {"items": {"type": "integer"}, "title": "Left Eye Bottom", "type": "array"}, "left_eye_left": {"items": {"type": "integer"}, "title": "Left Eye Left", "type": "array"}, "right_eye": {"items": {"type": "integer"}, "title": "Right Eye", "type": "array"}, "right_eye_top": {"items": {"type": "integer"}, "title": "Right Eye Top", "type": "array"}, "right_eye_right": {"items": {"type": "integer"}, "title": "Right Eye Right", "type": "array"}, "right_eye_bottom": {"items": {"type": "integer"}, "title": "Right Eye Bottom", "type": "array"}, "right_eye_left": {"items": {"type": "integer"}, "title": "Right Eye Left", "type": "array"}, "left_eyebrow_left": {"items": {"type": "integer"}, "title": "Left Eyebrow Left", "type": "array"}, "left_eyebrow_right": {"items": {"type": "integer"}, "title": "Left Eyebrow Right", "type": "array"}, "left_eyebrow_top": {"items": {"type": "integer"}, "title": "Left Eyebrow Top", "type": "array"}, "right_eyebrow_left": {"items": {"type": "integer"}, "title": "Right Eyebrow Left", "type": "array"}, "right_eyebrow_right": {"items": {"type": "integer"}, "title": "Right Eyebrow Right", "type": "array"}, "left_pupil": {"items": {"type": "integer"}, "title": "Left Pupil", "type": "array"}, "right_pupil": {"items": {"type": "integer"}, "title": "Right Pupil", "type": "array"}, "nose_tip": {"items": {"type": "integer"}, "title": "Nose Tip", "type": "array"}, "nose_bottom_right": {"items": {"type": "integer"}, "title": "Nose Bottom Right", "type": "array"}, "nose_bottom_left": {"items": {"type": 
"integer"}, "title": "Nose Bottom Left", "type": "array"}, "mouth_left": {"items": {"type": "integer"}, "title": "Mouth Left", "type": "array"}, "mouth_right": {"items": {"type": "integer"}, "title": "Mouth Right", "type": "array"}, "right_eyebrow_top": {"items": {"type": "integer"}, "title": "Right Eyebrow Top", "type": "array"}, "midpoint_between_eyes": {"items": {"type": "integer"}, "title": "Midpoint Between Eyes", "type": "array"}, "nose_bottom_center": {"items": {"type": "integer"}, "title": "Nose Bottom Center", "type": "array"}, "nose_left_alar_out_tip": {"items": {"type": "integer"}, "title": "Nose Left Alar Out Tip", "type": "array"}, "nose_left_alar_top": {"items": {"type": "integer"}, "title": "Nose Left Alar Top", "type": "array"}, "nose_right_alar_out_tip": {"items": {"type": "integer"}, "title": "Nose Right Alar Out Tip", "type": "array"}, "nose_right_alar_top": {"items": {"type": "integer"}, "title": "Nose Right Alar Top", "type": "array"}, "nose_root_left": {"items": {"type": "integer"}, "title": "Nose Root Left", "type": "array"}, "nose_root_right": {"items": {"type": "integer"}, "title": "Nose Root Right", "type": "array"}, "upper_lip": {"items": {"type": "integer"}, "title": "Upper Lip", "type": "array"}, "under_lip": {"items": {"type": "integer"}, "title": "Under Lip", "type": "array"}, "under_lip_bottom": {"items": {"type": "integer"}, "title": "Under Lip Bottom", "type": "array"}, "under_lip_top": {"items": {"type": "integer"}, "title": "Under Lip Top", "type": "array"}, "upper_lip_bottom": {"items": {"type": "integer"}, "title": "Upper Lip Bottom", "type": "array"}, "upper_lip_top": {"items": {"type": "integer"}, "title": "Upper Lip Top", "type": "array"}, "mouth_center": {"items": {"type": "integer"}, "title": "Mouth Center", "type": "array"}, "mouth_top": {"items": {"type": "integer"}, "title": "Mouth Top", "type": "array"}, "mouth_bottom": {"items": {"type": "integer"}, "title": "Mouth Bottom", "type": "array"}, "left_ear_tragion": 
{"items": {"type": "integer"}, "title": "Left Ear Tragion", "type": "array"}, "right_ear_tragion": {"items": {"type": "integer"}, "title": "Right Ear Tragion", "type": "array"}, "forehead_glabella": {"items": {"type": "integer"}, "title": "Forehead Glabella", "type": "array"}, "chin_gnathion": {"items": {"type": "integer"}, "title": "Chin Gnathion", "type": "array"}, "chin_left_gonion": {"items": {"type": "integer"}, "title": "Chin Left Gonion", "type": "array"}, "chin_right_gonion": {"items": {"type": "integer"}, "title": "Chin Right Gonion", "type": "array"}, "upper_jawline_left": {"items": {"type": "integer"}, "title": "Upper Jawline Left", "type": "array"}, "mid_jawline_left": {"items": {"type": "integer"}, "title": "Mid Jawline Left", "type": "array"}, "mid_jawline_right": {"items": {"type": "integer"}, "title": "Mid Jawline Right", "type": "array"}, "upper_jawline_right": {"items": {"type": "integer"}, "title": "Upper Jawline Right", "type": "array"}, "left_cheek_center": {"items": {"type": "integer"}, "title": "Left Cheek Center", "type": "array"}, "right_cheek_center": {"items": {"type": "integer"}, "title": "Right Cheek Center", "type": "array"}}, "title": "FaceLandmarks", "type": "object"}, "FaceMakeup": {"properties": {"eye_make": {"title": "Eye Make", "type": "boolean"}, "lip_make": {"title": "Lip Make", "type": "boolean"}}, "required": ["eye_make", "lip_make"], "title": "FaceMakeup", "type": "object"}, "FaceMatch": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/FaceCompareBoundingBox"}}, "required": ["confidence", "bounding_box"], "title": "FaceMatch", "type": "object"}, "FaceOcclusions": {"properties": {"eye_occluded": {"title": "Eye Occluded", "type": "boolean"}, "forehead_occluded": {"title": "Forehead Occluded", "type": "boolean"}, "mouth_occluded": {"title": "Mouth Occluded", "type": "boolean"}}, "required": ["eye_occluded", "forehead_occluded", "mouth_occluded"], "title": 
"FaceOcclusions", "type": "object"}, "FacePoses": {"properties": {"pitch": {"title": "Pitch", "type": "integer"}, "roll": {"title": "Roll", "type": "integer"}, "yaw": {"title": "Yaw", "type": "integer"}}, "required": ["pitch", "roll", "yaw"], "title": "FacePoses", "type": "object"}, "FaceQuality": {"properties": {"noise": {"title": "Noise", "type": "integer"}, "exposure": {"title": "Exposure", "type": "integer"}, "blur": {"title": "Blur", "type": "integer"}, "brightness": {"title": "Brightness", "type": "integer"}, "sharpness": {"title": "Sharpness", "type": "integer"}}, "required": ["noise", "exposure", "blur", "brightness", "sharpness"], "title": "FaceQuality", "type": "object"}, "FallbackTypeEnum": {"enum": ["rerun", "continue"], "type": "string", "description": "* `rerun` - Rerun\n* `continue` - Continue"}, "Feature": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 255}, "fullname": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true, "maxLength": 1000}}, "required": ["name"]}, "FieldError": {"type": "object", "properties": {"": {"type": "array", "items": {"type": "string"}}}, "required": [""]}, "FinalStatusEnum": {"enum": ["sucess", "fail"], "type": "string"}, "FinancialBankInformation": {"properties": {"iban": {"default": null, "description": "International Bank Account Number.", "title": "Iban", "type": "string"}, "swift": {"default": null, "description": "Society for Worldwide Interbank Financial Telecommunication code.", "title": "Swift", "type": "string"}, "bsb": {"default": null, "description": "Bank State Branch code (Australia).", "title": "Bsb", "type": "string"}, "sort_code": {"default": null, "description": "Sort code for UK banks.", "title": "Sort Code", "type": "string"}, "account_number": {"default": null, "description": "Bank account number.", "title": "Account Number", "type": "string"}, "routing_number": {"default": null, "description": "Routing number for banks 
in the United States.", "title": "Routing Number", "type": "string"}, "bic": {"default": null, "description": "Bank Identifier Code.", "title": "Bic", "type": "string"}}, "title": "FinancialBankInformation", "type": "object"}, "FinancialBarcode": {"properties": {"value": {"title": "Value", "type": "string"}, "type": {"title": "Type", "type": "string"}}, "required": ["value", "type"], "title": "FinancialBarcode", "type": "object"}, "FinancialCustomerInformation": {"properties": {"name": {"default": null, "description": "The name of the invoiced customer.", "title": "Name", "type": "string"}, "id_reference": {"default": null, "description": "Unique reference ID for the customer.", "title": "Id Reference", "type": "string"}, "mailling_address": {"default": null, "description": "The mailing address of the customer.", "title": "Mailling Address", "type": "string"}, "billing_address": {"default": null, "description": "The explicit billing address for the customer.", "title": "Billing Address", "type": "string"}, "shipping_address": {"default": null, "description": "The shipping address for the customer.", "title": "Shipping Address", "type": "string"}, "service_address": {"default": null, "description": "The service address associated with the customer.", "title": "Service Address", "type": "string"}, "remittance_address": {"default": null, "description": "The address to which payments should be remitted.", "title": "Remittance Address", "type": "string"}, "email": {"default": null, "description": "The email address of the customer.", "title": "Email", "type": "string"}, "phone": {"default": null, "description": "The phone number associated with the customer.", "title": "Phone", "type": "string"}, "vat_number": {"default": null, "description": "VAT (Value Added Tax) number of the customer.", "title": "Vat Number", "type": "string"}, "abn_number": {"default": null, "description": "ABN (Australian Business Number) of the customer.", "title": "Abn Number", "type": 
"string"}, "gst_number": {"default": null, "description": "GST (Goods and Services Tax) number of the customer.", "title": "Gst Number", "type": "string"}, "pan_number": {"default": null, "description": "PAN (Permanent Account Number) of the customer.", "title": "Pan Number", "type": "string"}, "business_number": {"default": null, "description": "Business registration number of the customer.", "title": "Business Number", "type": "string"}, "siret_number": {"default": null, "description": "SIRET (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises et de leurs \u00c9tablissements) number of the customer.", "title": "Siret Number", "type": "string"}, "siren_number": {"default": null, "description": "SIREN (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises) number of the customer.", "title": "Siren Number", "type": "string"}, "customer_number": {"default": null, "description": "Customer identification number.", "title": "Customer Number", "type": "string"}, "coc_number": {"default": null, "description": "Chamber of Commerce registration number.", "title": "Coc Number", "type": "string"}, "fiscal_number": {"default": null, "description": "Fiscal identification number of the customer.", "title": "Fiscal Number", "type": "string"}, "registration_number": {"default": null, "description": "Official registration number of the customer.", "title": "Registration Number", "type": "string"}, "tax_id": {"default": null, "description": "Tax identification number of the customer.", "title": "Tax Id", "type": "string"}, "website": {"default": null, "description": "The website associated with the customer.", "title": "Website", "type": "string"}, "remit_to_name": {"default": null, "description": "The name associated with the customer's remittance address.", "title": "Remit To Name", "type": "string"}, "city": {"default": null, "description": "The city associated with the customer's address.", "title": "City", "type": "string"}, "country": {"default": null, 
"description": "The country associated with the customer's address.", "title": "Country", "type": "string"}, "house_number": {"default": null, "description": "The house number associated with the customer's address.", "title": "House Number", "type": "string"}, "province": {"default": null, "description": "The province associated with the customer's address.", "title": "Province", "type": "string"}, "street_name": {"default": null, "description": "The street name associated with the customer's address.", "title": "Street Name", "type": "string"}, "zip_code": {"default": null, "description": "The ZIP code associated with the customer's address.", "title": "Zip Code", "type": "string"}, "municipality": {"default": null, "description": "The municipality associated with the customer's address.", "title": "Municipality", "type": "string"}}, "title": "FinancialCustomerInformation", "type": "object"}, "FinancialDocumentInformation": {"properties": {"invoice_receipt_id": {"default": null, "description": "Identifier for the invoice.", "title": "Invoice Receipt Id", "type": "string"}, "purchase_order": {"default": null, "description": "Purchase order related to the document.", "title": "Purchase Order", "type": "string"}, "invoice_date": {"default": null, "description": "Date of the invoice.", "title": "Invoice Date", "type": "string"}, "time": {"default": null, "description": "Time associated with the document.", "title": "Time", "type": "string"}, "invoice_due_date": {"default": null, "description": "Due date for the invoice.", "title": "Invoice Due Date", "type": "string"}, "service_start_date": {"default": null, "description": "Start date of the service associated with the document.", "title": "Service Start Date", "type": "string"}, "service_end_date": {"default": null, "description": "End date of the service associated with the document.", "title": "Service End Date", "type": "string"}, "reference": {"default": null, "description": "Reference number associated with the 
document.", "title": "Reference", "type": "string"}, "biller_code": {"default": null, "description": "Biller code associated with the document.", "title": "Biller Code", "type": "string"}, "order_date": {"default": null, "description": "Date of the order associated with the document.", "title": "Order Date", "type": "string"}, "tracking_number": {"default": null, "description": "Tracking number associated with the document.", "title": "Tracking Number", "type": "string"}, "barcodes": {"description": "List of barcodes associated with the document.", "items": {"$ref": "#/components/schemas/FinancialBarcode"}, "title": "Barcodes", "type": "array"}}, "title": "FinancialDocumentInformation", "type": "object"}, "FinancialDocumentMetadata": {"properties": {"document_index": {"default": null, "description": "Index of the detected document.", "title": "Document Index", "type": "integer"}, "document_page_number": {"default": null, "description": "Page number within the document.", "title": "Document Page Number", "type": "integer"}, "document_type": {"default": null, "description": "Type or category of the document.", "title": "Document Type", "type": "string"}}, "title": "FinancialDocumentMetadata", "type": "object"}, "FinancialLineItem": {"properties": {"tax": {"default": null, "description": "Tax amount for the line item.", "title": "Tax", "type": "integer"}, "amount_line": {"default": null, "description": "Total amount for the line item.", "title": "Amount Line", "type": "integer"}, "description": {"default": null, "description": "Description of the line item.", "title": "Description", "type": "string"}, "quantity": {"default": null, "description": "Quantity of units for the line item.", "title": "Quantity", "type": "integer"}, "unit_price": {"default": null, "description": "Unit price for each unit in the line item.", "title": "Unit Price", "type": "integer"}, "unit_type": {"default": null, "description": "Type of unit (e.g., hours, items).", "title": "Unit Type", 
"type": "string"}, "date": {"default": null, "description": "Date associated with the line item.", "title": "Date", "type": "string"}, "product_code": {"default": null, "description": "Product code or identifier for the line item.", "title": "Product Code", "type": "string"}, "purchase_order": {"default": null, "description": "Purchase order related to the line item.", "title": "Purchase Order", "type": "string"}, "tax_rate": {"default": null, "description": "Tax rate applied to the line item.", "title": "Tax Rate", "type": "integer"}, "base_total": {"default": null, "description": "Base total amount before any discounts or taxes.", "title": "Base Total", "type": "integer"}, "sub_total": {"default": null, "description": "Subtotal amount for the line item.", "title": "Sub Total", "type": "integer"}, "discount_amount": {"default": null, "description": "Amount of discount applied to the line item.", "title": "Discount Amount", "type": "integer"}, "discount_rate": {"default": null, "description": "Rate of discount applied to the line item.", "title": "Discount Rate", "type": "integer"}, "discount_code": {"default": null, "description": "Code associated with any discount applied to the line item.", "title": "Discount Code", "type": "string"}, "order_number": {"default": null, "description": "Order number associated with the line item.", "title": "Order Number", "type": "string"}, "title": {"default": null, "description": "Title or name of the line item.", "title": "Title", "type": "string"}}, "title": "FinancialLineItem", "type": "object"}, "FinancialLocalInformation": {"properties": {"currency": {"default": null, "description": "Currency used in financial transactions.", "title": "Currency", "type": "string"}, "currency_code": {"default": null, "description": "Currency code (e.g., USD, EUR).", "title": "Currency Code", "type": "string"}, "currency_exchange_rate": {"default": null, "description": "Exchange rate for the specified currency.", "title": "Currency Exchange 
Rate", "type": "string"}, "country": {"default": null, "description": "Country associated with the local financial information.", "title": "Country", "type": "string"}, "language": {"default": null, "description": "Language used in financial transactions.", "title": "Language", "type": "string"}}, "title": "FinancialLocalInformation", "type": "object"}, "FinancialMerchantInformation": {"properties": {"name": {"default": null, "description": "Name of the merchant.", "title": "Name", "type": "string"}, "address": {"default": null, "description": "Address of the merchant.", "title": "Address", "type": "string"}, "phone": {"default": null, "description": "Phone number of the merchant.", "title": "Phone", "type": "string"}, "tax_id": {"default": null, "description": "Tax identification number of the merchant.", "title": "Tax Id", "type": "string"}, "id_reference": {"default": null, "description": "Unique reference ID for the merchant.", "title": "Id Reference", "type": "string"}, "vat_number": {"default": null, "description": "VAT (Value Added Tax) number of the merchant.", "title": "Vat Number", "type": "string"}, "abn_number": {"default": null, "description": "ABN (Australian Business Number) of the merchant.", "title": "Abn Number", "type": "string"}, "gst_number": {"default": null, "description": "GST (Goods and Services Tax) number of the merchant.", "title": "Gst Number", "type": "string"}, "business_number": {"default": null, "description": "Business registration number of the merchant.", "title": "Business Number", "type": "string"}, "siret_number": {"default": null, "description": "SIRET (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises et de leurs \u00c9tablissements) number of the merchant.", "title": "Siret Number", "type": "string"}, "siren_number": {"default": null, "description": "SIREN (Syst\u00e8me d'Identification du R\u00e9pertoire des Entreprises) number of the merchant.", "title": "Siren Number", "type": "string"}, "pan_number": 
{"default": null, "description": "PAN (Permanent Account Number) of the merchant.", "title": "Pan Number", "type": "string"}, "coc_number": {"default": null, "description": "Chamber of Commerce registration number of the merchant.", "title": "Coc Number", "type": "string"}, "fiscal_number": {"default": null, "description": "Fiscal identification number of the merchant.", "title": "Fiscal Number", "type": "string"}, "email": {"default": null, "description": "Email address of the merchant.", "title": "Email", "type": "string"}, "fax": {"default": null, "description": "Fax number of the merchant.", "title": "Fax", "type": "string"}, "website": {"default": null, "description": "Website of the merchant.", "title": "Website", "type": "string"}, "registration": {"default": null, "description": "Official registration information of the merchant.", "title": "Registration", "type": "string"}, "city": {"default": null, "description": "City associated with the merchant's address.", "title": "City", "type": "string"}, "country": {"default": null, "description": "Country associated with the merchant's address.", "title": "Country", "type": "string"}, "house_number": {"default": null, "description": "House number associated with the merchant's address.", "title": "House Number", "type": "string"}, "province": {"default": null, "description": "Province associated with the merchant's address.", "title": "Province", "type": "string"}, "street_name": {"default": null, "description": "Street name associated with the merchant's address.", "title": "Street Name", "type": "string"}, "zip_code": {"default": null, "description": "ZIP code associated with the merchant's address.", "title": "Zip Code", "type": "string"}, "country_code": {"default": null, "description": "Country code associated with the merchant's location.", "title": "Country Code", "type": "string"}}, "title": "FinancialMerchantInformation", "type": "object"}, "FinancialParserObjectDataClass": {"properties": 
{"customer_information": {"$ref": "#/components/schemas/FinancialCustomerInformation"}, "merchant_information": {"$ref": "#/components/schemas/FinancialMerchantInformation"}, "payment_information": {"$ref": "#/components/schemas/FinancialPaymentInformation"}, "financial_document_information": {"$ref": "#/components/schemas/FinancialDocumentInformation"}, "local": {"$ref": "#/components/schemas/FinancialLocalInformation"}, "bank": {"$ref": "#/components/schemas/FinancialBankInformation"}, "item_lines": {"description": "List of line items associated with the document.", "items": {"$ref": "#/components/schemas/FinancialLineItem"}, "title": "Item Lines", "type": "array"}, "document_metadata": {"$ref": "#/components/schemas/FinancialDocumentMetadata"}}, "required": ["customer_information", "merchant_information", "payment_information", "financial_document_information", "local", "bank", "document_metadata"], "title": "FinancialParserObjectDataClass", "type": "object"}, "FinancialPaymentInformation": {"properties": {"amount_due": {"default": null, "description": "Amount due for payment.", "title": "Amount Due", "type": "integer"}, "amount_tip": {"default": null, "description": "Tip amount in a financial transaction.", "title": "Amount Tip", "type": "integer"}, "amount_shipping": {"default": null, "description": "Shipping cost in a financial transaction.", "title": "Amount Shipping", "type": "integer"}, "amount_change": {"default": null, "description": "Change amount in a financial transaction.", "title": "Amount Change", "type": "integer"}, "amount_paid": {"default": null, "description": "Amount already paid in a financial transaction.", "title": "Amount Paid", "type": "integer"}, "total": {"default": null, "description": "Total amount in the invoice.", "title": "Total", "type": "integer"}, "subtotal": {"default": null, "description": "Subtotal amount in a financial transaction.", "title": "Subtotal", "type": "integer"}, "total_tax": {"default": null, "description": 
"Total tax amount in a financial transaction.", "title": "Total Tax", "type": "integer"}, "tax_rate": {"default": null, "description": "Tax rate applied in a financial transaction.", "title": "Tax Rate", "type": "integer"}, "discount": {"default": null, "description": "Discount amount applied in a financial transaction.", "title": "Discount", "type": "integer"}, "gratuity": {"default": null, "description": "Gratuity amount in a financial transaction.", "title": "Gratuity", "type": "integer"}, "service_charge": {"default": null, "description": "Service charge in a financial transaction.", "title": "Service Charge", "type": "integer"}, "previous_unpaid_balance": {"default": null, "description": "Previous unpaid balance in a financial transaction.", "title": "Previous Unpaid Balance", "type": "integer"}, "prior_balance": {"default": null, "description": "Prior balance before the current financial transaction.", "title": "Prior Balance", "type": "integer"}, "payment_terms": {"default": null, "description": "Terms and conditions for payment.", "title": "Payment Terms", "type": "string"}, "payment_method": {"default": null, "description": "Payment method used in the financial transaction.", "title": "Payment Method", "type": "string"}, "payment_card_number": {"default": null, "description": "Card number used in the payment.", "title": "Payment Card Number", "type": "string"}, "payment_auth_code": {"default": null, "description": "Authorization code for the payment.", "title": "Payment Auth Code", "type": "string"}, "shipping_handling_charge": {"default": null, "description": "Charge for shipping and handling in a financial transaction.", "title": "Shipping Handling Charge", "type": "integer"}, "transaction_number": {"default": null, "description": "Unique identifier for the financial transaction.", "title": "Transaction Number", "type": "string"}, "transaction_reference": {"default": null, "description": "Reference number for the financial transaction.", "title": 
"Transaction Reference", "type": "string"}}, "title": "FinancialPaymentInformation", "type": "object"}, "GeneralSentimentEnum": {"enum": ["Positive", "Negative", "Neutral"], "type": "string"}, "GenerateRequest": {"type": "object", "properties": {"messages": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "A list containing all the conversations between the user and the assistant.\nEach item in the list should be a dictionary with two keys: 'role' and 'message'.\n\n**role**: Specifies the role of the speaker and can have the values 'user', 'system', 'assistant' or 'tool'.\nThe system role instructs the way the model should answer, e.g. 'You are a helpful assistant'. The user\nrole specifies the user query and assistant is the model's response. The tool role is for external tools that\ncan be used in the conversation.\n\n**message**: A list of dictionaries. Each dictionary in the 'message' list must contain the keys 'type' and 'content'.\n\n#### Structure\n- **type**: Specifies the type of content and can be 'image_url' or 'text'.\n- **content**: A dictionary with the actual content based on the 'type':\n - If 'type' is 'image_url', 'content' must contain 'image_url' and must not contain 'text'.\n - If 'type' is 'text', 'content' must contain 'text' and must not contain 'image_url'.\n\n#### Example\n```json\n[\n {\n \"role\": \"user\",\n \"content\": [\n {\n \"type\": \"text\",\n \"text\": \"Describe this image\"\n },\n {\n \"type\": \"image_url\",\n \"image_url\": {\n \"url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg\"\n }\n }\n ]\n }\n]\n```"}, "model": {"type": "string", "minLength": 1, "description": "The OpenAI model to use for the chat completion. \nThis field is required and specifies which language model will process the conversation. 
\n\n**Example values**: 'gpt-3.5-turbo', 'gpt-4', 'gpt-4-turbo'"}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. \n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "metadata": {"type": "array", "items": {"$ref": "#/components/schemas/MetadataRequest"}, "description": "Optional list of metadata associated with the chat request. \nCan be used to provide additional context or tracking information. \n\n**Example**:\n```json\n{\n \"metadata\": [\n {\"key\": \"conversation_id\", \"value\": \"chat_12345\"},\n {\"key\": \"source\", \"value\": \"customer_support\"}\n ]\n}\n```"}, "frequency_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Controls repetitiveness of model responses by penalizing frequent tokens. \nRanges from -2.0 to 2.0. \n\n**Values**:\n- Positive values: Reduce token repetition\n- Negative values: Encourage repetition\n- 0.0: Default behavior\n\n**Example**: 1.5 to significantly reduce repeated phrases"}, "logit_bias": {"type": "object", "additionalProperties": {"type": "number", "format": "double"}, "description": "Modify the likelihood of specific tokens appearing in the response. \nA dictionary where keys are token IDs and values are bias scores. \n\n**Example**:\n```json\n{\n \"logit_bias\": {\n \"50256\": -100, # Reduce probability of end-of-text token\n \"15\": 5 # Slightly increase probability of a specific token\n }\n}\n```"}, "logprobs": {"type": "boolean", "description": "If set to True, returns log probabilities of the most likely tokens. \nUseful for advanced token probability analysis. 
\n\n**Example**: True to get detailed token likelihood information"}, "top_logprobs": {"type": "integer", "maximum": 20, "minimum": 0, "description": "Number of top log probabilities to return with each token. \nMust be between 0 and 20. \n\n**Example**: 5 to get top 5 most likely tokens for each position"}, "max_completion_tokens": {"type": "integer", "minimum": 1, "description": "Maximum number of tokens to generate in the completion. \nMust be at least 1. \n\n**Example**: 150 to limit response to approximately 100-150 words"}, "n": {"type": "integer", "minimum": 1, "description": "Number of chat completion choices to generate.\n\n **Example**: 3 to generate multiple alternative responses"}, "modalities": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of supported input/output modalities for the chat. \n\n**Example**:\n```json\n{\n \"modalities\": [\"text\", \"image\", \"audio\"]\n}\n```"}, "prediction": {"type": "object", "additionalProperties": {}, "description": "Optional field for storing prediction-related information. \nFlexible dictionary to capture model's predictive metadata. \n\n**Example**:\n```json\n{\n \"prediction\": {\n \"confidence_score\": 0.85,\n \"top_prediction\": \"response_category\"\n }\n}\n```"}, "audio": {"type": "object", "additionalProperties": {}, "description": "Optional dictionary for audio-related parameters or metadata. \n\n**Example**:\n```json\n{\n \"audio\": {\n \"language\": \"en-US\",\n \"transcription_format\": \"srt\"\n }\n}\n```"}, "presence_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Adjusts likelihood of discussing new topics by penalizing existing tokens. \nRanges from -2.0 to 2.0. 
\n\n**Values**:\n- Positive values: Encourage more diverse topics\n- Negative values: Keep discussion more focused\n- 0.0: Default behavior\n\n**Example**: 1.0 to promote topic diversity"}, "response_format": {"type": "object", "additionalProperties": {}, "description": "Specify the desired response format for the completion. \n\n**Example**:\n```json\n{\n \"response_format\": {\n \"type\": \"json_object\",\n \"schema\": {...}\n }\n}\n```"}, "seed": {"type": "integer", "description": "Set a seed for deterministic sampling to reproduce consistent results. \n\n**Example**: 42 for a reproducible random generation process"}, "service_tier": {"allOf": [{"$ref": "#/components/schemas/ServiceTierEnum"}], "description": "Select the service tier for the API request. \n\n**Choices**:\n- 'auto': Automatically select appropriate tier\n- 'default': Use default service configuration\n\n* `auto` - auto\n* `default` - default"}, "stop": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of strings that will cause the model to stop generating. \n\n**Example**:\n```json\n{\n \"stop\": [\"\\n\", \"Human:\", \"AI:\"]\n}\n```"}, "stream": {"type": "boolean", "default": false, "description": "If True, returns tokens as they are generated in a streaming format. \nDefault is False. \n\n**Example**: True for real-time token streaming"}, "stream_options": {"type": "object", "additionalProperties": {}, "description": "Additional configuration for streaming responses. \n\n**Example**:\n```json\n{\n \"stream_options\": {\n \"include_usage\": true\n }\n}\n```"}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "description": "Controls randomness in token selection. \nRanges from 0.0 to 2.0. 
\n\n**Values**:\n- 0.0: Most deterministic, focused responses\n- 1.0: Balanced randomness\n- 2.0: Most creative, unpredictable responses\n\n**Example**: 0.7 for a good balance of creativity and focus"}, "top_p": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "description": "Nucleus sampling threshold for token selection. \nRanges from 0.0 to 1.0. Default is 1.0. \n\n**Values**:\n- 1.0: Consider all tokens\n- Lower values: More focused, deterministic sampling\n\n**Example**: 0.9 to select from top 90% most probable tokens"}, "tools": {"type": "array", "items": {}, "description": "List of tools or function definitions available to the model. \n\n**Example**:\n```json\n{\n \"tools\": [\n {\n \"type\": \"function\",\n \"function\": {\n \"name\": \"get_weather\",\n \"description\": \"Retrieve current weather\"\n }\n }\n ]\n}\n```"}, "tool_choice": {"type": "string", "minLength": 1, "description": "Specify how tools should be used in the completion. \n\n**Example values**:\n- 'auto': Model decides when to use tools\n- 'none': Disable tool usage\n- Specific tool name to always use a particular tool"}, "parallel_tool_calls": {"type": "boolean", "description": "Allow the model to make multiple tool calls in parallel. \n\n**Example**: True to enable concurrent tool invocations"}, "user": {"type": "string", "minLength": 1, "description": "Optional identifier for the end-user to help track and monitor API usage. \n\n**Example**: 'user_123456'"}, "function_call": {"type": "string", "minLength": 1, "description": "Control how function calls are handled. \n\n**Example values**:\n- 'auto': Default behavior\n- 'none': Disable function calls\n- Specific function name to force its execution"}, "functions": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "List of function definitions available to the model. 
\n\n**Example**:\n```json\n{\n \"functions\": [\n {\n \"name\": \"get_current_weather\",\n \"description\": \"Get the current weather for a location\",\n \"parameters\": {...}\n }\n ]\n}\n```"}, "thinking": {"allOf": [{"$ref": "#/components/schemas/ThinkingRequest"}], "description": "Configuration for enabling Claude's extended thinking. When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer. Requires a minimum budget of 1,024 tokens and counts towards your max_tokens limit.\n\n**Example**:\n```json\n{\n \"thinking\": {\n \"type\": \"enabled\",\n \"budget_tokens\": 1024\n }\n}\n```"}, "web_search_options": {"type": "object", "additionalProperties": {}, "description": "Options for web search integration. \n **Example**:\n ```json\n web_search_options={\n \"search_context_size\": \"medium\" # Options: \"low\", \"medium\", \"high\"\n }\n ```"}, "filter_documents": {"type": "object", "additionalProperties": {}, "default": {}, "description": "Filter uploaded documents based on their metadata. Specify key-value pairs where the key represents the metadata field and the value is the desired metadata value. Please ensure that the provided metadata keys are available in your database."}, "min_score": {"type": "number", "format": "double", "default": 0.0, "description": "A minimum score threshold for the model to consider a chunk as a valid response. Higher values mean the model will be more conservative and only return chunks that are more similar to the query. Lower values mean the model will be more open to returning chunks that are less similar to the query."}, "k": {"type": "integer", "minimum": 1, "default": 3, "description": "How many result chunks you want to return"}, "max_tokens": {"type": "integer", "maximum": 16385, "minimum": 1, "default": 100, "description": "The maximum number of tokens to generate in the completion. 
The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "conversation_id": {"type": "string", "format": "uuid"}}, "required": ["messages", "model"]}, "GeneratedImageDataClass": {"properties": {"image": {"title": "Image", "type": "string"}, "image_resource_url": {"title": "Image Resource Url", "type": "string"}}, "required": ["image", "image_resource_url"], "title": "GeneratedImageDataClass", "type": "object"}, "GenerationAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "text": {"type": "string", "minLength": 1, "description": "The text prompt that describes the content and style of the video you want to generate. Be specific and detailed to guide the AI in creating your desired video."}, "file": {"type": "string", "format": "binary", "description": "Initial keyframe image for the video. Upload a single JPEG or PNG file that will serve as the starting point for video generation. This image will influence the style, composition, and initial scene of the generated video.", "pattern": "(?:png|jpg)$"}, "duration": {"type": "integer", "minimum": 1, "default": 6, "description": "Length of the generated video in seconds. Minimum duration is 1 second. If not specified, the video will default to 6 seconds."}, "fps": {"type": "integer", "minimum": 1, "default": 24, "description": "Frames per second (FPS) of the generated video. Determines the smoothness of the video motion. Default is 24 FPS, which is standard for most video content. 
Minimum value is 1 FPS."}, "dimension": {"type": "string", "minLength": 1, "default": "1280x720", "description": "Video resolution in the format 'width x height' (e.g., '1280x720'). Default resolution is 1280x720 (720p). Ensure the format is exactly 'width x height' with a lowercase 'x'."}, "seed": {"type": "integer", "default": 12, "description": "Random seed value that initializes the noise for the generation process. Use this to create consistent or unique variations of your video:\n- Range: 0 to 2,147,483,646\n- Default: 12\n- Changing the seed while keeping other parameters constant will produce a different video that still matches your prompt\n- Useful for exploring multiple creative variations of the same concept"}}, "required": ["providers", "text"]}, "ImageaiDetectionAiDetectionDataClassPredictionEnum": {"enum": ["ai-generated", "original"], "type": "string"}, "InfosIdentityParserDataClass": {"properties": {"last_name": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "given_names": {"items": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "title": "Given Names", "type": "array"}, "birth_place": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "birth_date": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "issuance_date": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "expire_date": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "document_id": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "issuing_state": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "address": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "age": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "country": {"$ref": "#/components/schemas/Country"}, "document_type": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "gender": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "image_id": {"items": {"$ref": 
"#/components/schemas/ItemIdentityParserDataClass"}, "title": "Image Id", "type": "array"}, "image_signature": {"items": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "title": "Image Signature", "type": "array"}, "mrz": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}, "nationality": {"$ref": "#/components/schemas/ItemIdentityParserDataClass"}}, "required": ["last_name", "birth_place", "birth_date", "issuance_date", "expire_date", "document_id", "issuing_state", "address", "age", "country", "document_type", "gender", "mrz", "nationality"], "title": "InfosIdentityParserDataClass", "type": "object"}, "InfosKeywordExtractionDataClass": {"properties": {"keyword": {"title": "Keyword", "type": "string"}, "importance": {"title": "Importance", "type": "integer"}}, "required": ["keyword", "importance"], "title": "InfosKeywordExtractionDataClass", "type": "object"}, "InfosLanguageDetectionDataClass": {"properties": {"language": {"title": "Language", "type": "string"}, "display_name": {"title": "Display Name", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["language", "display_name", "confidence"], "title": "InfosLanguageDetectionDataClass", "type": "object"}, "InfosNamedEntityRecognitionDataClass": {"properties": {"entity": {"title": "Entity", "type": "string"}, "category": {"title": "Category", "type": "string"}, "importance": {"title": "Importance", "type": "integer"}}, "required": ["entity", "category", "importance"], "title": "InfosNamedEntityRecognitionDataClass", "type": "object"}, "ItemBankCheckParsingDataClass": {"properties": {"amount": {"title": "Amount", "type": "integer"}, "amount_text": {"title": "Amount Text", "type": "string"}, "bank_address": {"title": "Bank Address", "type": "string"}, "bank_name": {"title": "Bank Name", "type": "string"}, "date": {"title": "Date", "type": "string"}, "memo": {"title": "Memo", "type": "string"}, "payer_address": {"title": "Payer Address", "type": 
"string"}, "payer_name": {"title": "Payer Name", "type": "string"}, "receiver_address": {"title": "Receiver Address", "type": "string"}, "receiver_name": {"title": "Receiver Name", "type": "string"}, "currency": {"title": "Currency", "type": "string"}, "micr": {"$ref": "#/components/schemas/MicrModel"}}, "required": ["amount", "amount_text", "bank_address", "bank_name", "date", "memo", "payer_address", "payer_name", "receiver_address", "receiver_name", "currency", "micr"], "title": "ItemBankCheckParsingDataClass", "type": "object"}, "ItemDataExtraction": {"properties": {"key": {"title": "Key", "type": "string"}, "value": {"title": "Value"}, "bounding_box": {"$ref": "#/components/schemas/BoundingBox"}, "confidence_score": {"maximum": 1.0, "minimum": 0.0, "title": "Confidence Score", "type": "integer"}}, "required": ["key", "value", "bounding_box", "confidence_score"], "title": "ItemDataExtraction", "type": "object"}, "ItemIdentityParserDataClass": {"properties": {"value": {"default": null, "title": "Value", "type": "string"}, "confidence": {"default": null, "title": "Confidence", "type": "integer"}}, "title": "ItemIdentityParserDataClass", "type": "object"}, "LandmarkItem": {"properties": {"description": {"title": "Description", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "bounding_box": {"items": {"$ref": "#/components/schemas/LandmarkVertice"}, "title": "Bounding Box", "type": "array"}, "locations": {"items": {"$ref": "#/components/schemas/LandmarkLocation"}, "title": "Locations", "type": "array"}}, "required": ["description", "confidence"], "title": "LandmarkItem", "type": "object"}, "LandmarkLatLng": {"properties": {"latitude": {"title": "Latitude", "type": "integer"}, "longitude": {"title": "Longitude", "type": "integer"}}, "required": ["latitude", "longitude"], "title": "LandmarkLatLng", "type": "object"}, "LandmarkLocation": {"properties": {"lat_lng": {"$ref": "#/components/schemas/LandmarkLatLng"}}, "required": ["lat_lng"], 
"title": "LandmarkLocation", "type": "object"}, "LandmarkVertice": {"properties": {"x": {"title": "X", "type": "integer"}, "y": {"title": "Y", "type": "integer"}}, "required": ["x", "y"], "title": "LandmarkVertice", "type": "object"}, "LandmarksVideo": {"properties": {"eye_left": {"items": {"type": "integer"}, "title": "Eye Left", "type": "array"}, "eye_right": {"items": {"type": "integer"}, "title": "Eye Right", "type": "array"}, "nose": {"items": {"type": "integer"}, "title": "Nose", "type": "array"}, "mouth_left": {"items": {"type": "integer"}, "title": "Mouth Left", "type": "array"}, "mouth_right": {"items": {"type": "integer"}, "title": "Mouth Right", "type": "array"}}, "title": "LandmarksVideo", "type": "object"}, "LaunchAsyncJobResponse": {"type": "object", "properties": {"public_id": {"type": "string", "format": "uuid"}}, "required": ["public_id"]}, "Line": {"description": "Line of a document\n\nAttributes:\n text (str): Text detected in the line\n bounding_boxes (Sequence[BoundingBox]): Bounding boxes of the words in the line\n words (Sequence[Word]): List of words of the line\n confidence (float): Confidence of the line", "properties": {"text": {"description": "Text detected in the line", "title": "Text", "type": "string"}, "words": {"description": "List of words", "items": {"$ref": "#/components/schemas/Word"}, "title": "Words", "type": "array"}, "bounding_box": {"allOf": [{"$ref": "#/components/schemas/BoundingBox"}], "default": null, "description": "Bounding box of the line, can be None"}, "confidence": {"description": "Confidence of the line", "title": "Confidence", "type": "integer"}}, "required": ["text", "confidence"], "title": "Line", "type": "object"}, "ListAsyncJobResponse": {"type": "object", "properties": {"jobs": {"type": "array", "items": {"$ref": "#/components/schemas/AsyncJobList"}}}, "required": ["jobs"]}, "ListChunkRequest": {"type": "object", "properties": {"filter_documents": {"type": "object", "additionalProperties": {}, "default": 
{}, "description": "Filter uploaded documents based on their metadata. Specify key-value pairs where the key represents the metadata field and the value is the desired metadata value. Please ensure that the provided metadata keys are available in your database."}, "limit": {"type": "integer", "minimum": 1, "default": 10, "description": "Specifies the maximum number of chunk IDs to return. Defaults to 10 if not provided."}, "with_payload": {"type": "boolean", "default": false, "description": "If set to True, includes the full payload of each chunk in the response. By default, only chunk IDs are returned."}}}, "LogoBoundingPoly": {"properties": {"vertices": {"description": "Vertices of the logos in the image", "items": {"$ref": "#/components/schemas/LogoVertice"}, "title": "Vertices", "type": "array"}}, "title": "LogoBoundingPoly", "type": "object"}, "LogoItem": {"properties": {"bounding_poly": {"allOf": [{"$ref": "#/components/schemas/LogoBoundingPoly"}], "default": {"vertices": []}}, "description": {"description": "Name of the logo", "title": "Description", "type": "string"}, "score": {"description": "Confidence score of how sure it is that this is a real logo.", "title": "Score", "type": "integer"}}, "required": ["description", "score"], "title": "LogoItem", "type": "object"}, "LogoTrack": {"properties": {"description": {"title": "Description", "type": "string"}, "tracking": {"items": {"$ref": "#/components/schemas/VideoLogo"}, "title": "Tracking", "type": "array"}}, "required": ["description"], "title": "LogoTrack", "type": "object"}, "LogoVertice": {"properties": {"x": {"description": "The x-coordinate of the vertex.", "title": "X", "type": "integer"}, "y": {"description": "The y-coordinate of the vertex.", "title": "Y", "type": "integer"}}, "required": ["x", "y"], "title": "LogoVertice", "type": "object"}, "LowerCloth": {"properties": {"value": {"title": "Value", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["value", 
"confidence"], "title": "LowerCloth", "type": "object"}, "Message": {"type": "object", "properties": {"user_text": {"type": "string"}, "assistant_text": {"type": "string", "nullable": true}}, "required": ["user_text"]}, "MessageRequest": {"type": "object", "properties": {"user_text": {"type": "string", "minLength": 1}, "assistant_text": {"type": "string", "nullable": true}}, "required": ["user_text"]}, "MetadataRequest": {"type": "object", "properties": {"key": {"type": "string", "minLength": 1, "maxLength": 64}, "value": {"type": "string", "minLength": 1, "maxLength": 512}}, "required": ["key", "value"]}, "MicrModel": {"properties": {"raw": {"title": "Raw", "type": "string"}, "account_number": {"title": "Account Number", "type": "string"}, "routing_number": {"title": "Routing Number", "type": "string"}, "serial_number": {"title": "Serial Number", "type": "string"}, "check_number": {"title": "Check Number", "type": "string"}}, "required": ["raw", "account_number", "routing_number", "serial_number", "check_number"], "title": "MicrModel", "type": "object"}, "NestedBadRequest": {"type": "object", "properties": {"type": {"type": "string"}, "message": {"$ref": "#/components/schemas/FieldError"}}, "required": ["message", "type"]}, "NestedError": {"type": "object", "properties": {"type": {"type": "string"}, "message": {"type": "string"}}, "required": ["message", "type"]}, "NotFoundResponse": {"type": "object", "properties": {"details": {"type": "string", "default": "Not Found"}}}, "NullEnum": {"enum": [null]}, "ObjectFrame": {"properties": {"timestamp": {"title": "Timestamp", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoObjectBoundingBox"}}, "required": ["timestamp", "bounding_box"], "title": "ObjectFrame", "type": "object"}, "ObjectItem": {"properties": {"label": {"title": "Label", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "x_min": {"title": "X Min", "type": "integer"}, "x_max": {"title": "X Max", "type": 
"integer"}, "y_min": {"title": "Y Min", "type": "integer"}, "y_max": {"title": "Y Max", "type": "integer"}}, "required": ["label", "confidence", "x_min", "x_max", "y_min", "y_max"], "title": "ObjectItem", "type": "object"}, "ObjectTrack": {"properties": {"description": {"title": "Description", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "frames": {"items": {"$ref": "#/components/schemas/ObjectFrame"}, "title": "Frames", "type": "array"}}, "required": ["description", "confidence"], "title": "ObjectTrack", "type": "object"}, "OcrTablesAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "JSON data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "language": {"type": "string", "nullable": true, "description": "Language code of the language the document is written in (ex: fr (French), en (English), es (Spanish))"}}, "required": ["providers"]}, "OptionEnum": {"enum": ["FEMALE", "MALE"], "type": "string", "description": "* `FEMALE` - Female\n* `MALE` - Male"}, "Page": {"description": "Page of a document\n\nAttributes:\n lines (Sequence[Line]): List of lines of the page", "properties": {"lines": {"description": "List of lines", "items": {"$ref": "#/components/schemas/Line"}, "title": "Lines", "type": "array"}}, "title": "Page", "type": "object"}, "PaginatedBatchResponse": {"type": "object", "properties": {"total": {"type": "integer", "description": "Total requests made"}, "current_page": {"type": "integer", "description": "Current page number"}, "last_page": {"type": "integer"}, "per_page": {"type": "integer", "description": "Number of requests per page"}, "From": {"type": "integer"}, "to": {"type": "integer"}, "prev_page_url": {"type": "string", "format": "uri"}, "next_page_url": {"type": "string", "format": "uri"}, "requests": {"type": "array", "items": {"$ref": "#/components/schemas/BatchResponseRequest"}}, "status": {"$ref": "#/components/schemas/Status889Enum"}, "created": {"type": "string", "format": "date-time", "readOnly": true}, "updated": {"type": "string", "format": "date-time", "readOnly": true}}, "required": ["From", "created", "current_page", "last_page", "per_page", "requests", "to", "total", "updated"]}, "PaginatedExecutionListList": {"type": "object", "required": ["count", "results"], "properties": {"count": {"type": "integer"}, "next": 
{"type": "string", "nullable": true, "format": "uri"}, "previous": {"type": "string", "nullable": true, "format": "uri"}, "results": {"type": "array", "items": {"$ref": "#/components/schemas/ExecutionList"}}}}, "PaginatedPromptCreateList": {"type": "object", "required": ["count", "results"], "properties": {"count": {"type": "integer"}, "next": {"type": "string", "nullable": true, "format": "uri"}, "previous": {"type": "string", "nullable": true, "format": "uri"}, "results": {"type": "array", "items": {"$ref": "#/components/schemas/PromptCreate"}}}}, "PaginatedPromptHistoryList": {"type": "object", "required": ["count", "results"], "properties": {"count": {"type": "integer"}, "next": {"type": "string", "nullable": true, "format": "uri"}, "previous": {"type": "string", "nullable": true, "format": "uri"}, "results": {"type": "array", "items": {"$ref": "#/components/schemas/PromptHistory"}}}}, "PatchedAskYodaProjectUpdateRequest": {"type": "object", "properties": {"ocr_provider": {"type": "string", "minLength": 1, "default": "amazon"}, "speech_to_text_provider": {"type": "string", "minLength": 1, "default": "openai"}, "llm_provider": {"type": "string", "minLength": 1, "description": "Select a default LLM provider to use in your project."}, "llm_model": {"type": "string", "minLength": 1, "description": "Select a default Model for LLM provider to use in your project"}, "chunk_size": {"type": "integer", "maximum": 10000, "minimum": 1, "nullable": true}, "chunk_separators": {"type": "array", "items": {"type": "string"}, "nullable": true}}}, "PatchedAssetUpdateRequest": {"type": "object", "properties": {"sub_resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}}}, "PatchedConversationDetailRequest": {"type": "object", "properties": {"name": {"type": "string", "nullable": true, "maxLength": 255}}}, "PatchedCustomTokenUpdateRequest": {"type": "object", "properties": {"balance": {"type": "number", "format": 
"double", "maximum": 100000, "minimum": -100000, "exclusiveMaximum": true, "exclusiveMinimum": true, "description": "Optional remaining credits balance for this Token, if `active_balance` is set to True and the balance reaches 0, this token will become unusable"}, "expire_time": {"type": "string", "format": "date-time", "nullable": true}, "active_balance": {"type": "boolean", "description": "Whether to use the balance field or not."}}}, "PatchedPromptHistoryRequest": {"type": "object", "properties": {"text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "PatchedPromptUpdateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. 
These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "current_version": {"type": "integer", "description": "The production version of the prompt, that is used by default when you call the prompt"}, "file_urls": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "PatchedResourceUpdateRequest": {"type": "object", "properties": {"resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "minLength": 1, "maxLength": 100}}}, "PatchedWebhookParametersRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). 
Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}}, "PersonAttributes": {"properties": {"upper_cloths": {"items": {"$ref": "#/components/schemas/UpperCloth"}, "title": "Upper Cloths", "type": "array"}, "lower_cloths": {"items": {"$ref": "#/components/schemas/LowerCloth"}, "title": "Lower Cloths", "type": "array"}}, "title": "PersonAttributes", "type": "object"}, "PersonLandmarks": {"properties": {"eye_left": {"items": {"type": "integer"}, "title": "Eye Left", "type": "array"}, "eye_right": {"items": {"type": "integer"}, "title": "Eye Right", "type": "array"}, "nose": {"items": {"type": "integer"}, "title": "Nose", "type": "array"}, "ear_left": {"items": {"type": "integer"}, "title": "Ear Left", "type": "array"}, "ear_right": {"items": {"type": "integer"}, "title": "Ear Right", "type": "array"}, "shoulder_left": {"items": {"type": "integer"}, "title": "Shoulder Left", "type": "array"}, "shoulder_right": {"items": {"type": "integer"}, "title": "Shoulder Right", "type": "array"}, "elbow_left": {"items": {"type": "integer"}, "title": "Elbow Left", "type": "array"}, "elbow_right": {"items": {"type": "integer"}, "title": "Elbow Right", "type": "array"}, "wrist_left": {"items": {"type": "integer"}, "title": "Wrist Left", "type": "array"}, "wrist_right": {"items": {"type": "integer"}, "title": "Wrist Right", "type": "array"}, "hip_left": {"items": {"type": "integer"}, "title": "Hip Left", "type": "array"}, "hip_right": {"items": {"type": "integer"}, "title": "Hip Right", "type": "array"}, "knee_left": {"items": {"type": "integer"}, "title": "Knee Left", "type": "array"}, "knee_right": {"items": {"type": "integer"}, "title": "Knee Right", "type": "array"}, "ankle_left": {"items": {"type": "integer"}, "title": "Ankle Left", "type": "array"}, "ankle_right": {"items": {"type": 
"integer"}, "title": "Ankle Right", "type": "array"}, "mouth_left": {"items": {"type": "integer"}, "title": "Mouth Left", "type": "array"}, "mouth_right": {"items": {"type": "integer"}, "title": "Mouth Right", "type": "array"}}, "title": "PersonLandmarks", "type": "object"}, "PersonTracking": {"properties": {"offset": {"title": "Offset", "type": "integer"}, "attributes": {"$ref": "#/components/schemas/PersonAttributes"}, "landmarks": {"$ref": "#/components/schemas/PersonLandmarks"}, "poses": {"$ref": "#/components/schemas/VideoPersonPoses"}, "quality": {"$ref": "#/components/schemas/VideoPersonQuality"}, "bounding_box": {"$ref": "#/components/schemas/VideoTrackingBoundingBox"}}, "required": ["offset", "bounding_box"], "title": "PersonTracking", "type": "object"}, "PlagiaDetectionCandidate": {"properties": {"url": {"title": "Url", "type": "string"}, "plagia_score": {"title": "Plagia Score", "type": "integer"}, "prediction": {"title": "Prediction", "type": "string"}, "plagiarized_text": {"title": "Plagiarized Text", "type": "string"}}, "required": ["url", "plagia_score", "prediction", "plagiarized_text"], "title": "PlagiaDetectionCandidate", "type": "object"}, "PlagiaDetectionItem": {"properties": {"text": {"title": "Text", "type": "string"}, "candidates": {"items": {"$ref": "#/components/schemas/PlagiaDetectionCandidate"}, "title": "Candidates", "type": "array"}}, "required": ["text"], "title": "PlagiaDetectionItem", "type": "object"}, "PredictionB20Enum": {"enum": ["deepfake", "original"], "type": "string"}, "PriceUnitTypeEnum": {"enum": ["file", "image", "page", "size", "request", "seconde", "minute", "free", "hour", "char", "token", "exec_time", "unknown"], "type": "string", "description": "* `file` - File\n* `image` - Image\n* `page` - Page\n* `size` - Size\n* `request` - Request\n* `seconde` - Second\n* `minute` - Minute\n* `free` - Free\n* `hour` - Hour\n* `char` - Characters\n* `token` - Token\n* `exec_time` - Execution Time\n* `unknown` - Unknown"}, 
"PricingSerialzier": {"type": "object", "properties": {"model_name": {"type": "string", "description": "Model name, default to 'default' if no models to chose from", "maxLength": 255}, "price": {"type": "string", "format": "decimal", "pattern": "^-?\\d{0,6}(?:\\.\\d{0,9})?$"}, "price_unit_quantity": {"type": "integer", "maximum": 2147483647, "minimum": 0}, "min_price_quantity": {"type": "integer", "maximum": 2147483647, "minimum": 0, "nullable": true}, "price_unit_type": {"$ref": "#/components/schemas/PriceUnitTypeEnum"}, "detail_type": {"nullable": true, "description": "(Optional) type of extra value, MUST be the same name as the feature parameter name. eg: resolution\n\n* `resolution` - Resolution\n* `document_type` - Document Type", "oneOf": [{"$ref": "#/components/schemas/DetailTypeEnum"}, {"$ref": "#/components/schemas/BlankEnum"}, {"$ref": "#/components/schemas/NullEnum"}]}, "detail_value": {"type": "string", "nullable": true, "description": "(Optional) extra value for detailed pricing, eg: 250x250 for resolution", "maxLength": 255}, "get_detail_type_display": {"type": "string", "readOnly": true}, "is_post_call": {"type": "boolean"}}, "required": ["get_detail_type_display"]}, "ProjectTypeEnum": {"enum": ["AskYoDa", "Translathor", "X-Merge"], "type": "string", "description": "* `AskYoDa` - Askyoda\n* `Translathor` - Translathor\n* `X-Merge` - X Merge"}, "PromptCallRequest": {"type": "object", "properties": {"model": {"type": "string", "nullable": true, "minLength": 1, "description": "Optional model override (e.g., 'openai/gpt-4o'). If not provided, the model specified in the prompt will be used."}, "prompt_context": {"type": "object", "additionalProperties": {}, "nullable": true, "default": {}, "description": "Variables to inject into the prompt template.\n#### Example\n```json\n{\n \"my_variable\": \"some_value\",\n}\n```"}, "params": {"type": "object", "additionalProperties": {}, "nullable": true, "default": {}, "description": "Optional params override. 
If not provided, the default params in the prompt will be used\n\nParams that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models. Overrides the file urls set on the prompt."}}}, "PromptCreate": {"type": "object", "properties": {"id": {"type": "integer", "readOnly": true}, "name": {"type": "string", "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "current_version": {"type": "integer", "readOnly": true, "nullable": true}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true}, "history_count": {"type": "integer", "readOnly": true}}, "required": ["created_at", "current_version", "history_count", "id", "name", "updated_at"]}, "PromptCreateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "writeOnly": true, "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "writeOnly": true, "minLength": 1, "description": "The model used to call the prompt. E.g. 
openai/gpt-4o"}, "params": {"writeOnly": true, "description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}, "writeOnly": true, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "writeOnly": true, "minLength": 1, "description": "Specify a system prompt for the LLM"}}, "required": ["model", "name", "text"]}, "PromptDataClass": {"properties": {"text": {"title": "Text", "type": "string"}}, "required": ["text"], "title": "PromptDataClass", "type": "object"}, "PromptHistory": {"type": "object", "properties": {"id": {"type": "integer", "readOnly": true}, "version": {"type": "integer", "readOnly": true}, "text": {"type": "string", "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "name": {"type": "string", "readOnly": true}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "maxLength": 1000}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. 
Files are not supported by all models."}, "system_prompt": {"type": "string", "description": "Specify a system prompt for the LLM"}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true}}, "required": ["created_at", "id", "name", "updated_at", "version"]}, "PromptHistoryRequest": {"type": "object", "properties": {"text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "file_urls": {"type": "array", "items": {"type": "string", "format": "uri", "minLength": 1, "maxLength": 1000}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "PromptUpdate": {"type": "object", "properties": {"id": {"type": "integer", "readOnly": true}, "name": {"type": "string", "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. 
These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "current_version": {"type": "integer", "description": "The production version of the prompt, that is used by default when you call the prompt"}, "created_at": {"type": "string", "format": "date-time", "readOnly": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true}, "file_urls": {"type": "array", "items": {"type": "string"}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "nullable": true, "description": "Specify a system prompt for the LLM"}}, "required": ["created_at", "id", "updated_at"]}, "PromptUpdateRequest": {"type": "object", "properties": {"name": {"type": "string", "minLength": 1, "description": "The unique identifier for the prompt. Must contain only alphanumeric characters, underscores (_) and hyphens (-). No spaces allowed."}, "text": {"type": "string", "minLength": 1, "description": "The text used for the prompt\n\nYou can include prompt variables with {{ my_variable }}. These variables are injected when you later call your prompt.\n\nThe template language is compatible with jinja2\n\n#### Example\n```bash\nTranslate this word to {{ language }}: {{ word }}\n```"}, "model": {"type": "string", "minLength": 1, "description": "The model used to call the prompt. E.g. openai/gpt-4o"}, "params": {"description": "Params that are passed on to the llm request. 
See llm chat docs for more details.\n\n#### Example\n```json\n{\n \"temperature\": 0.7,\n \"max_tokens\": 100\n}\n```"}, "current_version": {"type": "integer", "description": "The production version of the prompt, that is used by default when you call the prompt"}, "file_urls": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "Optional list of URLs to images or other files that should be included with the prompt for multimodal models. Files are not supported by all models."}, "system_prompt": {"type": "string", "nullable": true, "minLength": 1, "description": "Specify a system prompt for the LLM"}}}, "Provider": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 255}, "fullname": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true, "maxLength": 1000}}, "required": ["name"]}, "ProviderSubfeature": {"type": "object", "properties": {"name": {"type": "string", "readOnly": true}, "version": {"type": "string", "readOnly": true}, "pricings": {"type": "array", "items": {"$ref": "#/components/schemas/PricingSerialzier"}}, "is_working": {"type": "boolean"}, "description_title": {"type": "string", "nullable": true, "maxLength": 100}, "description_content": {"type": "string", "nullable": true, "maxLength": 1000}, "provider": {"$ref": "#/components/schemas/Provider"}, "feature": {"$ref": "#/components/schemas/Feature"}, "subfeature": {"$ref": "#/components/schemas/Subfeature"}, "constraints": {"type": "object", "additionalProperties": {}, "readOnly": true}, "models": {"type": "object", "additionalProperties": {}, "readOnly": true}, "tts_models": {"type": "object", "additionalProperties": {}, "readOnly": true}, "llm_details": {"type": "object", "additionalProperties": {}, "readOnly": true}, "languages": {"type": "array", "items": {"type": "object", "properties": {"language_name": {"type": "string"}, "language_code": {"type": "string"}}, "required": ["language_code", 
"language_name"]}, "readOnly": true}, "phase": {"type": "string", "readOnly": true}, "regions": {"type": "array", "items": {"type": "string", "description": "2-letter ISO 3166-1 alpha-2 country code (e.g., 'us', 'eu', 'fr')"}, "readOnly": true}}, "required": ["constraints", "feature", "languages", "llm_details", "models", "name", "phase", "pricings", "provider", "regions", "subfeature", "tts_models", "version"]}, "QuestionAnswerAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mp4|mpeg|mov|avi|x\\-flx|mpg|webm|wmv|3gpp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "maximum": 3000000, "minimum": 1, "description": "The maximum number of tokens to generate in the completion. This value, combined with the token count of your prompt, cannot exceed the model's context length."}, "text": {"type": "string", "minLength": 1, "description": "Question about the video"}}, "required": ["providers", "text"]}, "ReasoningEffortEnum": {"enum": ["low", "medium", "high"], "type": "string", "description": "**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "RepresentationEnum": {"enum": ["document", "query", "symetric"], "type": "string", "description": "* `document` - document\n* `query` - query\n* `symetric` - symetric"}, "ResourceCreate": {"type": "object", "properties": {"resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "maxLength": 100}}, "required": ["data", "provider", "resource", "type"]}, "ResourceCreateRequest": {"type": "object", "properties": {"resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": 
"string", "minLength": 1, "maxLength": 100}}, "required": ["data", "provider", "resource", "type"]}, "ResourceList": {"type": "object", "properties": {"resource": {"type": "string", "maxLength": 255}, "data": {"type": "string", "format": "byte", "readOnly": true, "nullable": true}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "maxLength": 100}, "assets": {"type": "array", "items": {"$ref": "#/components/schemas/AssetList"}}}, "required": ["assets", "data", "provider", "resource", "type"]}, "ResourceUpdate": {"type": "object", "properties": {"resource": {"type": "string", "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "maxLength": 100}, "assets": {"type": "array", "items": {"$ref": "#/components/schemas/AssetList"}, "readOnly": true}}, "required": ["assets", "data", "provider", "resource", "type"]}, "ResourceUpdateRequest": {"type": "object", "properties": {"resource": {"type": "string", "minLength": 1, "maxLength": 255}, "data": {"type": "object", "additionalProperties": {}}, "type": {"$ref": "#/components/schemas/TypeDe8Enum"}, "provider": {"type": "string", "minLength": 1, "maxLength": 100}}, "required": ["data", "provider", "resource", "type"]}, "ResumeEducation": {"properties": {"total_years_education": {"title": "Total Years Education", "type": "integer"}, "entries": {"items": {"$ref": "#/components/schemas/ResumeEducationEntry"}, "title": "Entries", "type": "array"}}, "required": ["total_years_education"], "title": "ResumeEducation", "type": "object"}, "ResumeEducationEntry": {"properties": {"title": {"title": "Title", "type": "string"}, "start_date": {"title": "Start Date", "type": "string"}, "end_date": {"title": "End Date", "type": "string"}, "location": {"$ref": "#/components/schemas/ResumeLocation"}, "establishment": {"title": "Establishment", "type": "string"}, "description": {"title": "Description", 
"type": "string"}, "gpa": {"title": "Gpa", "type": "string"}, "accreditation": {"title": "Accreditation", "type": "string"}}, "required": ["title", "start_date", "end_date", "location", "establishment", "description", "gpa", "accreditation"], "title": "ResumeEducationEntry", "type": "object"}, "ResumeExtractedData": {"properties": {"personal_infos": {"$ref": "#/components/schemas/ResumePersonalInfo"}, "education": {"$ref": "#/components/schemas/ResumeEducation"}, "work_experience": {"$ref": "#/components/schemas/ResumeWorkExp"}, "languages": {"items": {"$ref": "#/components/schemas/ResumeLang"}, "title": "Languages", "type": "array"}, "skills": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Skills", "type": "array"}, "certifications": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Certifications", "type": "array"}, "courses": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Courses", "type": "array"}, "publications": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Publications", "type": "array"}, "interests": {"items": {"$ref": "#/components/schemas/ResumeSkill"}, "title": "Interests", "type": "array"}}, "required": ["personal_infos", "education", "work_experience"], "title": "ResumeExtractedData", "type": "object"}, "ResumeLang": {"properties": {"name": {"title": "Name", "type": "string"}, "code": {"title": "Code", "type": "string"}}, "required": ["name", "code"], "title": "ResumeLang", "type": "object"}, "ResumeLocation": {"properties": {"formatted_location": {"title": "Formatted Location", "type": "string"}, "postal_code": {"title": "Postal Code", "type": "string"}, "region": {"title": "Region", "type": "string"}, "country": {"title": "Country", "type": "string"}, "country_code": {"title": "Country Code", "type": "string"}, "raw_input_location": {"title": "Raw Input Location", "type": "string"}, "street": {"title": "Street", "type": "string"}, "street_number": {"title": "Street Number", 
"type": "string"}, "appartment_number": {"title": "Appartment Number", "type": "string"}, "city": {"title": "City", "type": "string"}}, "required": ["formatted_location", "postal_code", "region", "country", "country_code", "raw_input_location", "street", "street_number", "appartment_number", "city"], "title": "ResumeLocation", "type": "object"}, "ResumePersonalInfo": {"properties": {"name": {"$ref": "#/components/schemas/ResumePersonalName"}, "address": {"$ref": "#/components/schemas/ResumeLocation"}, "self_summary": {"title": "Self Summary", "type": "string"}, "objective": {"title": "Objective", "type": "string"}, "date_of_birth": {"title": "Date Of Birth", "type": "string"}, "place_of_birth": {"title": "Place Of Birth", "type": "string"}, "phones": {"items": {"type": "string"}, "title": "Phones", "type": "array"}, "mails": {"items": {"type": "string"}, "title": "Mails", "type": "array"}, "urls": {"items": {"type": "string"}, "title": "Urls", "type": "array"}, "fax": {"items": {"type": "string"}, "title": "Fax", "type": "array"}, "current_profession": {"title": "Current Profession", "type": "string"}, "gender": {"title": "Gender", "type": "string"}, "nationality": {"title": "Nationality", "type": "string"}, "martial_status": {"title": "Martial Status", "type": "string"}, "current_salary": {"title": "Current Salary", "type": "string"}, "availability": {"default": null, "title": "Availability", "type": "string"}}, "required": ["name", "address", "self_summary", "objective", "date_of_birth", "place_of_birth", "current_profession", "gender", "nationality", "martial_status", "current_salary"], "title": "ResumePersonalInfo", "type": "object"}, "ResumePersonalName": {"properties": {"first_name": {"title": "First Name", "type": "string"}, "last_name": {"title": "Last Name", "type": "string"}, "raw_name": {"title": "Raw Name", "type": "string"}, "middle": {"title": "Middle", "type": "string"}, "title": {"title": "Title", "type": "string"}, "prefix": {"title": "Prefix", 
"type": "string"}, "sufix": {"title": "Sufix", "type": "string"}}, "required": ["first_name", "last_name", "raw_name", "middle", "title", "prefix", "sufix"], "title": "ResumePersonalName", "type": "object"}, "ResumeSkill": {"properties": {"name": {"title": "Name", "type": "string"}, "type": {"title": "Type", "type": "string"}}, "required": ["name", "type"], "title": "ResumeSkill", "type": "object"}, "ResumeWorkExp": {"properties": {"total_years_experience": {"title": "Total Years Experience", "type": "string"}, "entries": {"items": {"$ref": "#/components/schemas/ResumeWorkExpEntry"}, "title": "Entries", "type": "array"}}, "required": ["total_years_experience"], "title": "ResumeWorkExp", "type": "object"}, "ResumeWorkExpEntry": {"properties": {"title": {"title": "Title", "type": "string"}, "start_date": {"title": "Start Date", "type": "string"}, "end_date": {"title": "End Date", "type": "string"}, "company": {"title": "Company", "type": "string"}, "location": {"$ref": "#/components/schemas/ResumeLocation"}, "description": {"title": "Description", "type": "string"}, "type": {"default": null, "title": "Type", "type": "string"}, "industry": {"title": "Industry", "type": "string"}}, "required": ["title", "start_date", "end_date", "company", "location", "description", "industry"], "title": "ResumeWorkExpEntry", "type": "object"}, "Row": {"properties": {"cells": {"items": {"$ref": "#/components/schemas/Cell"}, "title": "Cells", "type": "array"}}, "title": "Row", "type": "object"}, "SegmentSentimentAnalysisDataClass": {"description": "This class is used in SentimentAnalysisDataClass to describe each segment analyzed.\n\nArgs:\n - segment (str): The segment analyzed\n - sentiment (Literal['Positve', 'Negative', 'Neutral']) (Case is ignore): Sentiment of segment\n - sentiment_rate (float between 0 and 1): Rate of sentiment", "properties": {"segment": {"title": "Segment", "type": "string"}, "sentiment": {"allOf": [{"$ref": "#/components/schemas/SentimentEbfEnum"}], "title": 
"Sentiment"}, "sentiment_rate": {"maximum": 1.0, "minimum": 0.0, "title": "Sentiment Rate", "type": "integer"}}, "required": ["segment", "sentiment", "sentiment_rate"], "title": "SegmentSentimentAnalysisDataClass", "type": "object"}, "SentimentEbfEnum": {"enum": ["Positive", "Negative", "Neutral"], "type": "string"}, "ServiceTierEnum": {"enum": ["auto", "default"], "type": "string", "description": "**Choices**:\n- 'auto': Automatically select appropriate tier\n- 'default': Use default service configuration\n\n* `auto` - auto\n* `default` - default"}, "ShotFrame": {"properties": {"startTimeOffset": {"title": "Starttimeoffset", "type": "integer"}, "endTimeOffset": {"title": "Endtimeoffset", "type": "integer"}}, "required": ["startTimeOffset", "endTimeOffset"], "title": "ShotFrame", "type": "object"}, "SpeechDiarization": {"properties": {"total_speakers": {"title": "Total Speakers", "type": "integer"}, "entries": {"items": {"$ref": "#/components/schemas/SpeechDiarizationEntry"}, "title": "Entries", "type": "array"}, "error_message": {"default": null, "title": "Error Message", "type": "string"}}, "required": ["total_speakers"], "title": "SpeechDiarization", "type": "object"}, "SpeechDiarizationEntry": {"properties": {"segment": {"title": "Segment", "type": "string"}, "start_time": {"title": "Start Time", "type": "string"}, "end_time": {"title": "End Time", "type": "string"}, "speaker": {"title": "Speaker", "type": "integer"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["segment", "start_time", "end_time", "speaker", "confidence"], "title": "SpeechDiarizationEntry", "type": "object"}, "SpeechToTextAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}, "show_base_64": {"type": "boolean", "default": true}, "provider_params": {"type": "string", "description": "\nParameters specific to the provider that you want to send along the request.\n\nit should take a *provider* name as key and an object of parameters as value.\n\nExample:\n\n {\n \"deepgram\": {\n \"filler_words\": true,\n \"smart_format\": true,\n \"callback\": \"https://webhook.site/0000\"\n },\n \"assembly\": {\n \"webhook_url\": \"https://webhook.site/0000\"\n }\n }\n\nPlease refer to the documentation of each provider to see which parameters to send.\n"}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "language": {"type": "string", "nullable": true, "description": "Language code expected (ex: en, fr)"}, "speakers": {"type": "integer", "nullable": true, "default": 2, "description": "Number of speakers in the file audio"}, "profanity_filter": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather or not the service will filter profanity and replace inappropriate words with a series of asterisks"}, "custom_vocabulary": {"type": "string", "default": "", "description": "List of words or composed words to be detected by the speech to text engine. (Ex: Word, Mike, Draw, Los Angeles,...)"}, "convert_to_wav": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the audio/video file to wav format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "SpellCheckItem": {"description": "Represents a spell check item with suggestions.\n\nArgs:\n text (str): The text to spell check.\n type (str, optional): The type of the text.\n offset (int): The offset of the text.\n length (int): The length of the text.\n suggestions (Sequence[SuggestionItem], optional): The list of suggestions for the misspelled text.\n\nRaises:\n ValueError: If the offset or length is not positive.\n\nReturns:\n SpellCheckItem: An instance of the SpellCheckItem class.", "properties": {"text": {"title": "Text", "type": "string"}, "type": {"title": "Type", "type": "string"}, "offset": {"minimum": 0, "title": "Offset", "type": "integer"}, "length": {"minimum": 0, "title": "Length", "type": "integer"}, "suggestions": {"items": {"$ref": "#/components/schemas/SuggestionItem"}, "title": "Suggestions", "type": "array"}}, "required": ["text", "type", "offset", "length"], 
"title": "SpellCheckItem", "type": "object"}, "StateEnum": {"enum": ["finished", "failed", "Timeout error", "processing"], "type": "string", "description": "* `finished` - finished\n* `failed` - failed\n* `Timeout error` - Timeout error\n* `processing` - processing"}, "Status549Enum": {"enum": ["sucess", "fail"], "type": "string"}, "Status889Enum": {"enum": ["succeeded", "failed", "finished", "processing"], "type": "string", "description": "* `succeeded` - Status Succeeded\n* `failed` - Status Failed\n* `finished` - Status Finished\n* `processing` - Status Processing"}, "SubCategoryType": {"enum": ["CreditCard", "CardExpiry", "BankAccountNumber", "BankRoutingNumber", "SwiftCode", "TaxIdentificationNumber", "Name", "Age", "Email", "Phone", "PersonType", "Gender", "SocialSecurityNumber", "NationalIdentificationNumber", "NationalHealthService", "ResidentRegistrationNumber", "DriverLicenseNumber", "PassportNumber", "URL", "IP", "MAC", "VehicleIdentificationNumber", "LicensePlate", "VoterNumber", "AWSKeys", "AzureKeys", "Password", "CompanyName", "CompanyNumber", "BuisnessNumber", "Date", "Time", "DateTime", "Duration", "Address", "Location", "Other", "Anonymized", "Nerd", "Wsd", "Unknown"], "title": "SubCategoryType", "type": "string"}, "Subfeature": {"type": "object", "properties": {"name": {"type": "string", "maxLength": 255}, "fullname": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true, "maxLength": 1000}}, "required": ["name"]}, "SuggestionItem": {"description": "Represents a suggestion for a misspelled word.\n\nArgs:\n suggestion (str): The suggested text.\n score (float, optional): The score of the suggested text (between 0 and 1).\n\nRaises:\n ValueError: If the score is not between 0 and 1.\n\nReturns:\n SuggestionItem: An instance of the SuggestionItem class.", "properties": {"suggestion": {"title": "Suggestion", "type": "string"}, "score": {"maximum": 1.0, "minimum": 0.0, "title": "Score", "type": 
"integer"}}, "required": ["suggestion", "score"], "title": "SuggestionItem", "type": "object"}, "Table": {"properties": {"rows": {"items": {"$ref": "#/components/schemas/Row"}, "title": "Rows", "type": "array"}, "num_rows": {"title": "Num Rows", "type": "integer"}, "num_cols": {"title": "Num Cols", "type": "integer"}}, "required": ["num_rows", "num_cols"], "title": "Table", "type": "object"}, "TextModerationItem": {"properties": {"label": {"title": "Label", "type": "string"}, "likelihood": {"title": "Likelihood", "type": "integer"}, "category": {"$ref": "#/components/schemas/CategoryType"}, "subcategory": {"$ref": "#/components/schemas/SubCategoryType"}, "likelihood_score": {"title": "Likelihood Score", "type": "integer"}}, "required": ["label", "likelihood", "category", "subcategory", "likelihood_score"], "title": "TextModerationItem", "type": "object"}, "TextToSpeechAsyncRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "default": "", "description": "Language code expected (ex: en, fr)"}, "option": {"default": "", "oneOf": [{"$ref": "#/components/schemas/OptionEnum"}, {"$ref": "#/components/schemas/BlankEnum"}]}, "rate": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking rate by expressing a positive or negative number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "pitch": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking pitch by expressing a positive or negative number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "volume": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the audio volume by expressing a positive or negative number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "audio_format": {"type": "string", "nullable": true, "default": "", "description": "Optional parameter to specify the audio format in which the audio will be generated. By default, audios are encoded in MP3, except for lovoai which uses the wav container."}, "sampling_rate": {"type": "integer", "maximum": 200000, "minimum": 0, "nullable": true, "default": 0, "description": "Optional. The synthesis sample rate (in hertz) for this audio. 
When this is specified, the audio will be converted either to the right passed value, or to the nearest value acceptable by the provider"}, "webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listener.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}, "required": ["providers", "text"]}, "ThinkingRequest": {"type": "object", "properties": {"type": {"$ref": "#/components/schemas/ThinkingTypeEnum"}, "budget_tokens": {"type": "integer", "minimum": 1024}}, "required": ["budget_tokens", "type"]}, "ThinkingTypeEnum": {"enum": ["enabled", "disabled"], "type": "string", "description": "* `enabled` - enabled\n* `disabled` - disabled"}, "TokenData": {"properties": {"token": {"title": "Token", "type": "string"}, "data": {"additionalProperties": {"additionalProperties": {"$ref": "#/components/schemas/Details"}, "type": "object"}, "title": "Data", "type": "object"}}, "required": ["token", "data"], "title": "TokenData", "type": "object"}, "TokenTypeEnum": {"enum": ["sandbox_api_token", "api_token"], "type": "string", "description": "* `sandbox_api_token` - Sandbox\n* `api_token` - Back"}, "ToolCall": {"properties": {"id": {"title": "Id", "type": "string"}, "name": {"title": "Name", "type": "string"}, "arguments": {"title": "Arguments", "type": "string"}}, "required": ["id", "name", "arguments"], "title": "ToolCall", "type": "object"}, "ToolChoiceEnum": {"enum": 
["auto", "required", "none"], "type": "string", "description": "* `auto` - auto\n* `required` - required\n* `none` - none"}, "TypeDe8Enum": {"enum": ["db", "bucket", "db_vector", "ai"], "type": "string", "description": "* `db` - Db\n* `bucket` - Bucket\n* `db_vector` - Db Vector\n* `ai` - Ai"}, "TypeOfDataEnum": {"enum": ["TRAINING", "TEST"], "type": "string", "description": "* `TRAINING` - TRAINING\n* `TEST` - TEST"}, "UpperCloth": {"properties": {"value": {"title": "Value", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["value", "confidence"], "title": "UpperCloth", "type": "object"}, "UsageTokensDetails": {"properties": {"audio_tokens": {"description": "Number of audio tokens in the prompt", "title": "Audio Tokens", "type": "integer"}, "cached_tokens": {"description": "Number of cached tokens in the prompt", "title": "Cached Tokens", "type": "integer"}, "prompt_tokens": {"description": "Number of tokens in the prompt", "title": "Prompt Tokens", "type": "integer"}, "completion_tokens": {"description": "Number of tokens in the generated completion", "title": "Completion Tokens", "type": "integer"}, "total_tokens": {"description": "Total number of tokens used (prompt + completion)", "title": "Total Tokens", "type": "integer"}, "accepted_prediction_tokens": {"description": "Number of accepted tokens in the prompt", "title": "Accepted Prediction Tokens", "type": "integer"}, "reasoning_tokens": {"description": "Number of reasoning tokens in the prompt", "title": "Reasoning Tokens", "type": "integer"}, "rejected_prediction_tokens": {"description": "Number of rejected tokens in the prompt", "title": "Rejected Prediction Tokens", "type": "integer"}}, "required": ["audio_tokens", "cached_tokens", "prompt_tokens", "completion_tokens", "total_tokens", "accepted_prediction_tokens", "reasoning_tokens", "rejected_prediction_tokens"], "title": "UsageTokensDetails", "type": "object"}, "VideoBoundingBox": {"properties": {"top": 
{"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoBoundingBox", "type": "object"}, "VideoFace": {"properties": {"offset": {"title": "Offset", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoBoundingBox"}, "attributes": {"$ref": "#/components/schemas/FaceAttributes"}, "landmarks": {"$ref": "#/components/schemas/LandmarksVideo"}}, "required": ["offset", "bounding_box", "attributes", "landmarks"], "title": "VideoFace", "type": "object"}, "VideoFacePoses": {"properties": {"pitch": {"title": "Pitch", "type": "integer"}, "roll": {"title": "Roll", "type": "integer"}, "yawn": {"title": "Yawn", "type": "integer"}}, "required": ["pitch", "roll", "yawn"], "title": "VideoFacePoses", "type": "object"}, "VideoLabel": {"properties": {"name": {"title": "Name", "type": "string"}, "confidence": {"title": "Confidence", "type": "integer"}, "timestamp": {"items": {"$ref": "#/components/schemas/VideoLabelTimeStamp"}, "title": "Timestamp", "type": "array"}, "category": {"items": {"type": "string"}, "title": "Category", "type": "array"}, "bounding_box": {"items": {"$ref": "#/components/schemas/VideoLabelBoundingBox"}, "title": "Bounding Box", "type": "array"}}, "required": ["name", "confidence"], "title": "VideoLabel", "type": "object"}, "VideoLabelBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoLabelBoundingBox", "type": "object"}, "VideoLabelTimeStamp": {"properties": {"start": {"title": "Start", "type": "integer"}, "end": {"title": "End", "type": "integer"}}, "required": ["start", "end"], "title": "VideoLabelTimeStamp", "type": "object"}, 
"VideoLogo": {"properties": {"timestamp": {"title": "Timestamp", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoLogoBoundingBox"}, "confidence": {"title": "Confidence", "type": "integer"}}, "required": ["timestamp", "bounding_box", "confidence"], "title": "VideoLogo", "type": "object"}, "VideoLogoBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoLogoBoundingBox", "type": "object"}, "VideoObjectBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoObjectBoundingBox", "type": "object"}, "VideoPersonPoses": {"properties": {"pitch": {"title": "Pitch", "type": "integer"}, "roll": {"title": "Roll", "type": "integer"}, "yaw": {"title": "Yaw", "type": "integer"}}, "required": ["pitch", "roll", "yaw"], "title": "VideoPersonPoses", "type": "object"}, "VideoPersonQuality": {"properties": {"brightness": {"title": "Brightness", "type": "integer"}, "sharpness": {"title": "Sharpness", "type": "integer"}}, "required": ["brightness", "sharpness"], "title": "VideoPersonQuality", "type": "object"}, "VideoText": {"properties": {"text": {"title": "Text", "type": "string"}, "frames": {"items": {"$ref": "#/components/schemas/VideoTextFrames"}, "title": "Frames", "type": "array"}}, "required": ["text"], "title": "VideoText", "type": "object"}, "VideoTextBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", 
"width"], "title": "VideoTextBoundingBox", "type": "object"}, "VideoTextFrames": {"properties": {"confidence": {"title": "Confidence", "type": "integer"}, "timestamp": {"title": "Timestamp", "type": "integer"}, "bounding_box": {"$ref": "#/components/schemas/VideoTextBoundingBox"}}, "required": ["confidence", "timestamp", "bounding_box"], "title": "VideoTextFrames", "type": "object"}, "VideoTrackingBoundingBox": {"properties": {"top": {"title": "Top", "type": "integer"}, "left": {"title": "Left", "type": "integer"}, "height": {"title": "Height", "type": "integer"}, "width": {"title": "Width", "type": "integer"}}, "required": ["top", "left", "height", "width"], "title": "VideoTrackingBoundingBox", "type": "object"}, "VideoTrackingPerson": {"properties": {"tracked": {"items": {"$ref": "#/components/schemas/PersonTracking"}, "title": "Tracked", "type": "array"}}, "title": "VideoTrackingPerson", "type": "object"}, "WebhookParameters": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}}, "WebhookParametersRequest": {"type": "object", "properties": {"webhook_receiver": {"type": "string", "format": "uri", "minLength": 1, "description": "Webhook receiver should be a valid https URL (ex : https://your.listner.com/endpoint). 
After the processing is done, the webhook endpoint will receive a POST request with the result."}, "users_webhook_parameters": {"description": "Json data that contains of additional parameters that will be sent back to the webhook receiver (ex: api key for security or client's data ID to link the result internally). Will only be used when webhook_receiver is set."}, "send_webhook_data": {"type": "boolean", "default": true, "description": "If set to false the webhook will not contain the result data. Use if your webhook receiver has a request size limit."}}}, "Word": {"description": "Word of a document\n\nAttributes:\n text (str): Text detected in the word\n bounding_boxes (Sequence[BoundingBox]): Bounding boxes of the words in the word\n confidence (float): Confidence score of the word", "properties": {"text": {"description": "Text detected in the word", "title": "Text", "type": "string"}, "bounding_box": {"allOf": [{"$ref": "#/components/schemas/BoundingBox"}], "description": "Bounding boxes of the words in the word"}, "confidence": {"description": "Confidence score of the word", "title": "Confidence", "type": "integer"}}, "required": ["text", "bounding_box", "confidence"], "title": "Word", "type": "object"}, "Workflow": {"type": "object", "properties": {"id": {"type": "string", "format": "uuid", "readOnly": true}, "name": {"type": "string", "nullable": true, "maxLength": 255}, "description": {"type": "string", "nullable": true}, "content": {"type": "array", "items": {"type": "object", "additionalProperties": {}}}, "created_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}, "updated_at": {"type": "string", "format": "date-time", "readOnly": true, "nullable": true}, "output_node": {"type": "string", "nullable": true, "maxLength": 200}, "is_empty": {"type": "boolean", "readOnly": true}, "template": {"type": "integer", "nullable": true}, "code": {"type": "object", "additionalProperties": {}}, "webhook": {"type": "string", "readOnly": 
true}}, "required": ["created_at", "id", "is_empty", "updated_at", "webhook"]}, "YodaAskLlmResponse": {"properties": {"result": {"title": "Result", "type": "string"}, "llm_provider": {"title": "Llm Provider", "type": "string"}, "llm_model": {"title": "Llm Model", "type": "string"}}, "required": ["result", "llm_provider", "llm_model"], "title": "YodaAskLlmResponse", "type": "object"}, "YodaCreateProjectResponse": {"properties": {"project_id": {"format": "uuid", "title": "Project Id", "type": "string"}}, "required": ["project_id"], "title": "YodaCreateProjectResponse", "type": "object"}, "YodaDeleteResponse": {"properties": {"result": {"default": "Done!", "title": "Result", "type": "string"}}, "title": "YodaDeleteResponse", "type": "object"}, "YodaInfoResponse": {"properties": {"db_provider": {"title": "Db Provider", "type": "string"}, "embeddings_provider": {"title": "Embeddings Provider", "type": "string"}, "llm_provider": {"title": "Llm Provider", "type": "string"}, "llm_model": {"title": "Llm Model", "type": "string"}, "collection_size": {"title": "Collection Size", "type": "integer"}}, "required": ["db_provider", "embeddings_provider", "llm_provider", "llm_model", "collection_size"], "title": "YodaInfoResponse", "type": "object"}, "YodaListChunksIdsResponse": {"properties": {"chunks_ids": {"items": {"type": "string"}, "title": "Chunks Ids", "type": "array"}}, "required": ["chunks_ids"], "title": "YodaListChunksIdsResponse", "type": "object"}, "YodaQueryResponse": {"properties": {"result": {"items": {"$ref": "#/components/schemas/YodaQueryResponseItem"}, "title": "Result", "type": "array"}}, "required": ["result"], "title": "YodaQueryResponse", "type": "object"}, "YodaQueryResponseItem": {"properties": {"id": {"format": "uuid", "title": "Id", "type": "string"}, "version": {"title": "Version", "type": "integer"}, "score": {"title": "Score", "type": "integer"}, "payload": {"$ref": "#/components/schemas/YodaQueryResponsePayload"}, "vector": {"title": "Vector"}}, 
"required": ["id", "version", "score", "payload", "vector"], "title": "YodaQueryResponseItem", "type": "object"}, "YodaQueryResponsePayload": {"properties": {"metadata": {"title": "Metadata", "type": "object"}, "page_content": {"title": "Page Content", "type": "string"}}, "required": ["metadata", "page_content"], "title": "YodaQueryResponsePayload", "type": "object"}, "asyncaudiospeech_to_text_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/audiospeech_to_text_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncaudiospeech_to_text_asyncResponseModel", "type": "object"}, "asyncaudiotext_to_speech_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/audiotext_to_speech_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncaudiotext_to_speech_asyncResponseModel", "type": "object"}, "asyncocranonymization_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocranonymization_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocranonymization_asyncResponseModel", "type": "object"}, "asyncocrcustom_document_parsing_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocrcustom_document_parsing_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": 
"string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocrcustom_document_parsing_asyncResponseModel", "type": "object"}, "asyncocrocr_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocrocr_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocrocr_asyncResponseModel", "type": "object"}, "asyncocrocr_tables_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/ocrocr_tables_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncocrocr_tables_asyncResponseModel", "type": "object"}, "asyncvideodeepfake_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videodeepfake_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideodeepfake_detection_asyncResponseModel", "type": "object"}, "asyncvideoexplicit_content_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoexplicit_content_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoexplicit_content_detection_asyncResponseModel", "type": "object"}, "asyncvideoface_detection_asyncResponseModel": {"properties": {"results": {"$ref": 
"#/components/schemas/videoface_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoface_detection_asyncResponseModel", "type": "object"}, "asyncvideogeneration_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videogeneration_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideogeneration_asyncResponseModel", "type": "object"}, "asyncvideolabel_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videolabel_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideolabel_detection_asyncResponseModel", "type": "object"}, "asyncvideologo_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videologo_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideologo_detection_asyncResponseModel", "type": "object"}, "asyncvideoobject_tracking_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoobject_tracking_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": 
["results", "error", "public_id", "status"], "title": "asyncvideoobject_tracking_asyncResponseModel", "type": "object"}, "asyncvideoperson_tracking_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoperson_tracking_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoperson_tracking_asyncResponseModel", "type": "object"}, "asyncvideoquestion_answer_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoquestion_answer_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoquestion_answer_asyncResponseModel", "type": "object"}, "asyncvideoshot_change_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videoshot_change_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideoshot_change_detection_asyncResponseModel", "type": "object"}, "asyncvideotext_detection_asyncResponseModel": {"properties": {"results": {"$ref": "#/components/schemas/videotext_detection_asyncModel"}, "error": {"title": "Error", "type": "string"}, "public_id": {"format": "uuid", "title": "Public Id", "type": "string"}, "status": {"title": "Status", "type": "string"}}, "required": ["results", "error", "public_id", "status"], "title": "asyncvideotext_detection_asyncResponseModel", "type": "object"}, "audiospeech_to_text_asyncModel": {"properties": {"deepgram": {"allOf": [{"$ref": 
"#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "symbl": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "speechmatics": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "faker": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "gladia": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "assembly": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "voci": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "voxist": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/audiospeech_to_text_asyncSpeechToTextAsyncDataClass"}], "default": null}}, "title": "audiospeech_to_text_asyncModel", "type": "object"}, "audiospeech_to_text_asyncSpeechToTextAsyncDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "diarization": {"$ref": "#/components/schemas/SpeechDiarization"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the 
`show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["text", "diarization", "id", "final_status"], "title": "audiospeech_to_text_asyncSpeechToTextAsyncDataClass", "type": "object"}, "audiotext_to_speechResponseModel": {"properties": {"lovoai": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "deepgram": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "elevenlabs": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speechTextToSpeechDataClass"}], "default": null}}, "title": "audiotext_to_speechResponseModel", "type": "object"}, "audiotext_to_speechTextToSpeechDataClass": {"properties": {"audio": {"title": "Audio", "type": "string"}, "voice_type": {"title": "Voice Type", "type": "integer"}, "audio_resource_url": {"title": "Audio Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["audio", "voice_type", "audio_resource_url", "status"], 
"title": "audiotext_to_speechTextToSpeechDataClass", "type": "object"}, "audiotext_to_speechTextToSpeechRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "default": "", "description": "Language code expected (ex: en, fr)"}, "option": {"default": "", "oneOf": [{"$ref": "#/components/schemas/OptionEnum"}, {"$ref": "#/components/schemas/BlankEnum"}]}, "rate": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking rate by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "pitch": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the speaking pitch by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "volume": {"type": "integer", "maximum": 100, "minimum": -100, "nullable": true, "default": 0, "description": "Increase or decrease the audio volume by expressing a positif or negatif number ranging between 100 and -100 (a relative value as percentage varying from -100% to 100%)"}, "audio_format": {"type": "string", "nullable": true, "default": "", "description": "Optional parameter to specify the audio format in which the audio will be generated. By default, audios are encoded in MP3, except for lovoai which use the wav container."}, "sampling_rate": {"type": "integer", "maximum": 200000, "minimum": 0, "nullable": true, "default": 0, "description": "Optional. The synthesis sample rate (in hertz) for this audio. 
When this is specified, the audio will be converted either to the right passed value, or to a the nearest value acceptable by the provider"}}, "required": ["providers", "text"]}, "audiotext_to_speech_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speech_asyncTextToSpeechAsyncDataClass"}], "default": null}, "lovoai": {"allOf": [{"$ref": "#/components/schemas/audiotext_to_speech_asyncTextToSpeechAsyncDataClass"}], "default": null}}, "title": "audiotext_to_speech_asyncModel", "type": "object"}, "audiotext_to_speech_asyncTextToSpeechAsyncDataClass": {"properties": {"audio": {"title": "Audio", "type": "string"}, "voice_type": {"title": "Voice Type", "type": "integer"}, "audio_resource_url": {"title": "Audio Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["audio", "voice_type", "audio_resource_url", "id", "final_status"], "title": "audiotext_to_speech_asyncTextToSpeechAsyncDataClass", "type": "object"}, "credits_serializer": {"type": "object", "properties": {"credits": {"type": "number", "format": "double"}}, "required": ["credits"]}, "imageai_detectionAiDetectionDataClass": {"properties": {"ai_score": {"maximum": 1.0, "minimum": 0.0, "title": "Ai Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/ImageaiDetectionAiDetectionDataClassPredictionEnum"}], "title": "Prediction"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your 
request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["ai_score", "prediction", "status"], "title": "imageai_detectionAiDetectionDataClass", "type": "object"}, "imageai_detectionAiDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageai_detectionResponseModel": {"properties": {"winstonai": {"allOf": [{"$ref": "#/components/schemas/imageai_detectionAiDetectionDataClass"}], "default": null}}, "title": "imageai_detectionResponseModel", "type": "object"}, "imageanonymizationAnonymizationDataClass": {"properties": {"image": {"title": "Image", "type": "string"}, "image_resource_url": {"title": "Image Resource Url", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/AnonymizationItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["image", "image_resource_url", "status"], "title": "imageanonymizationAnonymizationDataClass", "type": "object"}, "imageanonymizationResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imageanonymizationAnonymizationDataClass"}], "default": null}}, "title": "imageanonymizationResponseModel", "type": "object"}, "imageanonymizationimagelandmark_detectionimageexplicit_contentimagedeepfake_detectionImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageautoml_classificationAutomlClassificationCreateProjectDataClass": {"properties": {"name": {"title": "Name", "type": "string"}, "project_id": {"title": "Project Id", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["name", "project_id", "status"], "title": "imageautoml_classificationAutomlClassificationCreateProjectDataClass", "type": "object"}, "imageautoml_classificationResponseModel": {"properties": {"nyckel": {"allOf": [{"$ref": "#/components/schemas/imageautoml_classificationAutomlClassificationCreateProjectDataClass"}], "default": null}}, "title": "imageautoml_classificationResponseModel", "type": "object"}, "imageautoml_classificationcreate_projectAutomlClassificationCreateProjectRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "name": {"type": "string", "minLength": 1, "description": "Name of your project", "maxLength": 250}}, "required": ["providers"]}, "imageautoml_classificationdelete_projectAutomlClassificationDeleteRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "project_id": {"type": "string", "minLength": 1, "description": "The id of project", "maxLength": 250}}, "required": ["project_id", "providers"]}, "imagebackground_removalBackgroundRemovalDataClass": {"properties": {"image_b64": {"description": "The image in base64 format.", "title": "Image B64", "type": "string"}, "image_resource_url": {"description": "The image url.", "title": "Image Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["image_b64", "image_resource_url", "status"], "title": "imagebackground_removalBackgroundRemovalDataClass", "type": "object"}, "imagebackground_removalBackgroundRemovalRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "provider_params": {"type": "string", "default": {}, "description": "Provider specific parameters"}}, "required": ["providers"]}, "imagebackground_removalResponseModel": {"properties": {"photoroom": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "stabilityai": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "api4ai": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "clipdrop": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "picsart": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imagebackground_removalBackgroundRemovalDataClass"}], "default": null}}, "title": "imagebackground_removalResponseModel", "type": "object"}, "imagedeepfake_detectionDeepfakeDetectionDataClass": {"properties": {"deepfake_score": {"maximum": 1.0, "minimum": 0.0, "title": "Deepfake Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/PredictionB20Enum"}], "title": "Prediction"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": 
["deepfake_score", "prediction", "status"], "title": "imagedeepfake_detectionDeepfakeDetectionDataClass", "type": "object"}, "imagedeepfake_detectionResponseModel": {"properties": {"sightengine": {"allOf": [{"$ref": "#/components/schemas/imagedeepfake_detectionDeepfakeDetectionDataClass"}], "default": null}}, "title": "imagedeepfake_detectionResponseModel", "type": "object"}, "imageembeddingsEmbeddingsDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/EmbeddingDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageembeddingsEmbeddingsDataClass", "type": "object"}, "imageembeddingsEmbeddingsRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "representation": {"allOf": [{"$ref": "#/components/schemas/RepresentationEnum"}], "description": "The type of embedding representation to embed the image with\n\n* `document` - document\n* `query` - query\n* `symetric` - symetric"}}, "required": ["providers", "representation"]}, "imageembeddingsResponseModel": {"properties": {"alephalpha": {"allOf": [{"$ref": "#/components/schemas/imageembeddingsEmbeddingsDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageembeddingsEmbeddingsDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageembeddingsEmbeddingsDataClass"}], "default": null}}, "title": "imageembeddingsResponseModel", "type": "object"}, "imageexplicit_contentExplicitContentDataClass": {"properties": {"nsfw_likelihood": {"description": "An integer representing the likelihood of NSFW content. Higher values indicate a higher likelihood.", "title": "Nsfw Likelihood", "type": "integer"}, "nsfw_likelihood_score": {"description": "A floating-point score representing the confidence level of the NSFW likelihood assessment. This is typically a value between 0.0 and 1.0.", "title": "Nsfw Likelihood Score", "type": "integer"}, "items": {"description": "A list of items identified as potentially explicit. 
Each item contains details of the explicit content detected.", "items": {"$ref": "#/components/schemas/ExplicitItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["nsfw_likelihood", "nsfw_likelihood_score", "status"], "title": "imageexplicit_contentExplicitContentDataClass", "type": "object"}, "imageexplicit_contentResponseModel": {"properties": {"sentisight": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "api4ai": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageexplicit_contentExplicitContentDataClass"}], "default": null}}, "title": "imageexplicit_contentResponseModel", "type": "object"}, "imageface_compareFaceCompareDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/FaceMatch"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": 
[{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageface_compareFaceCompareDataClass", "type": "object"}, "imageface_compareFaceCompareRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file1": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file1_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file2": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file2_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageface_compareResponseModel": {"properties": {"base64": {"allOf": [{"$ref": "#/components/schemas/imageface_compareFaceCompareDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageface_compareFaceCompareDataClass"}], "default": null}, "facepp": {"allOf": [{"$ref": "#/components/schemas/imageface_compareFaceCompareDataClass"}], "default": null}}, "title": "imageface_compareResponseModel", "type": "object"}, "imageface_detectionFaceDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/FaceItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageface_detectionFaceDetectionDataClass", "type": "object"}, "imageface_detectionFaceDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageface_detectionResponseModel": {"properties": {"api4ai": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageface_detectionFaceDetectionDataClass"}], "default": null}}, "title": "imageface_detectionResponseModel", "type": "object"}, "imageface_recognitionFaceRecognitionAddFaceDataClass": {"properties": {"face_ids": {"items": {"type": "string"}, "title": "Face Ids", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["face_ids", "status"], "title": "imageface_recognitionFaceRecognitionAddFaceDataClass", "type": "object"}, "imageface_recognitionResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/imageface_recognitionFaceRecognitionAddFaceDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageface_recognitionFaceRecognitionAddFaceDataClass"}], "default": null}, "facepp": {"allOf": [{"$ref": "#/components/schemas/imageface_recognitionFaceRecognitionAddFaceDataClass"}], 
"default": null}}, "title": "imageface_recognitionResponseModel", "type": "object"}, "imageface_recognitionadd_faceFaceRecognitionAddFaceRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|bmp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageface_recognitiondelete_faceFaceRecognitionDeleteFaceRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "face_id": {"type": "string", "minLength": 1, "description": "ID of face to delete"}}, "required": ["face_id", "providers"]}, "imageface_recognitionrecognizeFaceRecognitionDetectFaceRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|bmp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imagegenerationGenerationDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/GeneratedImageDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imagegenerationGenerationDataClass", "type": "object"}, "imagegenerationGenerationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Description of the desired image(s)."}, "resolution": {"type": "string", "minLength": 1, "description": "The image resolution (ex: 512x512, 1024x1024)", "maxLength": 50}, "num_images": {"type": "integer", "maximum": 10, "minimum": 1, "default": 1, "description": "The number of images to generate. Must be between 1 and 10."}}, "required": ["providers", "resolution", "text"]}, "imagegenerationResponseModel": {"properties": {"deepai": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "leonardo": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "stabilityai": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "bytedance": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "minimax": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}, "replicate": {"allOf": [{"$ref": "#/components/schemas/imagegenerationGenerationDataClass"}], "default": null}}, "title": "imagegenerationResponseModel", "type": "object"}, "imagelandmark_detectionLandmarkDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/LandmarkItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], 
"title": "Status"}}, "required": ["status"], "title": "imagelandmark_detectionLandmarkDetectionDataClass", "type": "object"}, "imagelandmark_detectionResponseModel": {"properties": {"microsoft": {"allOf": [{"$ref": "#/components/schemas/imagelandmark_detectionLandmarkDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imagelandmark_detectionLandmarkDetectionDataClass"}], "default": null}}, "title": "imagelandmark_detectionResponseModel", "type": "object"}, "imagelogo_detectionLogoDetectionDataClass": {"properties": {"items": {"description": "List of the detected brands logo from the image.", "items": {"$ref": "#/components/schemas/LogoItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imagelogo_detectionLogoDetectionDataClass", "type": "object"}, "imagelogo_detectionLogoDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imagelogo_detectionResponseModel": {"properties": {"smartclick": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "api4ai": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imagelogo_detectionLogoDetectionDataClass"}], "default": null}}, "title": "imagelogo_detectionResponseModel", "type": "object"}, "imageobject_detectionObjectDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/ObjectItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imageobject_detectionObjectDetectionDataClass", "type": "object"}, "imageobject_detectionObjectDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific 
models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}}, "required": ["providers"]}, "imageobject_detectionResponseModel": {"properties": {"sentisight": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "api4ai": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imageobject_detectionObjectDetectionDataClass"}], "default": null}}, "title": "imageobject_detectionResponseModel", "type": "object"}, "imagequestion_answerQuestionAnswerDataClass": {"properties": {"answers": {"items": {"type": "string"}, "title": "Answers", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "imagequestion_answerQuestionAnswerDataClass", "type": "object"}, "imagequestion_answerQuestionAnswerRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "question": {"type": "string", "minLength": 1, "description": "Question about the image"}, "max_tokens": {"type": "integer", "maximum": 2048, "minimum": 1, "default": 1000, "description": "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length."}}, "required": ["providers"]}, "imagequestion_answerResponseModel": {"properties": {"alephalpha": {"allOf": [{"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/imagequestion_answerQuestionAnswerDataClass"}], "default": null}}, "title": "imagequestion_answerResponseModel", "type": "object"}, "imagesearchResponseModel": {"properties": {"nyckel": {"allOf": [{"$ref": "#/components/schemas/imagesearchSearchDeleteImageDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/imagesearchSearchDeleteImageDataClass"}], "default": null}}, "title": "imagesearchResponseModel", "type": "object"}, "imagesearchSearchDeleteImageDataClass": {"properties": {"status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to 
`true` in your request", "title": "Original Response"}}, "required": ["status"], "title": "imagesearchSearchDeleteImageDataClass", "type": "object"}, "imagesearchdelete_imageDeleteImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "image_name": {"type": "string", "minLength": 1}}, "required": ["image_name", "providers"]}, "imagesearchlaunch_similaritySearchImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "n": {"type": "integer", "minimum": 1, "default": 10, "description": "The `n` parameter specifies the number of images you want to be returned in the response. It determines the count of the most similar images to the input image that will be included in the response. By default, it is set to 10."}}, "required": ["providers"]}, "imagesearchupload_imageUploadImageRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:jpg|jpeg|png|tiff)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "image_name": {"type": "string", "minLength": 1, "description": "The image name need to have the extension of the file."}}, "required": ["image_name", "providers"]}, "llmchatChatDataClass": {"properties": {"id": {"description": "Unique identifier for this completion", "title": "Id", "type": "string"}, "object": {"description": "Object type, always 'chat.completion'", "title": "Object", "type": "string"}, "created": {"description": "Unix timestamp for when the completion was created", "title": "Created", "type": "integer"}, "model": {"description": "The model used for completion", "title": "Model", "type": "string"}, "choices": {"description": "List of chat completion choices generated by the model", "items": {"$ref": "#/components/schemas/ChatCompletionChoice"}, "title": "Choices", "type": "array"}, "usage": {"allOf": [{"$ref": "#/components/schemas/ChatCompletionUsage"}], "description": "Usage statistics for the completion request"}, "system_fingerprint": {"default": null, "description": "Identifier for the system version that processed the request", "title": "System Fingerprint", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["id", "object", "created", "model", "choices", "usage", "status"], "title": "llmchatChatDataClass", "type": "object"}, "llmchatResponseModel": {"properties": {"deepinfra": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": 
"#/components/schemas/llmchatChatDataClass"}], "default": null}, "huggingface": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "together_ai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "databricks": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "deepseek": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "groq": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "ovhcloud": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "dashscope": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "cloudflare": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "nebius": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "iointelligence": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "cerebras": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "bytedance": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "replicate": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "fireworks_ai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], 
"default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "perplexityai": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "minimax": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}, "meta": {"allOf": [{"$ref": "#/components/schemas/llmchatChatDataClass"}], "default": null}}, "title": "llmchatResponseModel", "type": "object"}, "llmchatllmchatChatRequest": {"type": "object", "properties": {"messages": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "A list containing all the conversations between the user and the assistant.\nEach item in the list should be a dictionary with two keys: 'role' and 'message'.\n\n**role**: Specifies the role of the speaker and can have the values 'user', 'system', 'assistant' or 'tool'.\nThe system role instructs the way the model should answer, e.g. 'You are a helpful assistant'. The user\nrole specifies the user query and assistant is the model's response. The tool role is for external tools that\ncan be used in the conversation.\n\n**message**: A list of dictionaries. 
Each dictionary in the 'message' list must contain the keys 'type' and 'content'.\n\n#### Structure\n- **type**: Specifies the type of content and can be 'image_url' or 'text'.\n- **content**: A dictionary with the actual content based on the 'type':\n - If 'type' is 'image_url', 'content' must contain 'image_url' and must not contain 'text'.\n - If 'type' is 'text', 'content' must contain 'text' and must not contain 'image_url'.\n\n#### Example\n```json\n[\n {\n \"role\": \"user\",\n \"content\": [\n {\n \"type\": \"text\",\n \"text\": \"Describe this image\"\n },\n {\n \"type\": \"image_url\",\n \"image_url\": {\n \"url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg\"\n }\n }\n ]\n }\n]\n```"}, "model": {"type": "string", "minLength": 1, "description": "The OpenAI model to use for the chat completion. \nThis field is required and specifies which language model will process the conversation. \n\n**Example values**: 'gpt-3.5-turbo', 'gpt-4', 'gpt-4-turbo'"}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. \n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "metadata": {"type": "array", "items": {"$ref": "#/components/schemas/MetadataRequest"}, "description": "Optional list of metadata associated with the chat request. \nCan be used to provide additional context or tracking information. 
\n\n**Example**:\n```json\n{\n \"metadata\": [\n {\"key\": \"conversation_id\", \"value\": \"chat_12345\"},\n {\"key\": \"source\", \"value\": \"customer_support\"}\n ]\n}\n```"}, "frequency_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Controls repetitiveness of model responses by penalizing frequent tokens. \nRanges from -2.0 to 2.0. \n\n**Values**:\n- Positive values: Reduce token repetition\n- Negative values: Encourage repetition\n- 0.0: Default behavior\n\n**Example**: 1.5 to significantly reduce repeated phrases"}, "logit_bias": {"type": "object", "additionalProperties": {"type": "number", "format": "double"}, "description": "Modify the likelihood of specific tokens appearing in the response. \nA dictionary where keys are token IDs and values are bias scores. \n\n**Example**:\n```json\n{\n \"logit_bias\": {\n \"50256\": -100, # Reduce probability of end-of-text token\n \"15\": 5 # Slightly increase probability of a specific token\n }\n}\n```"}, "logprobs": {"type": "boolean", "description": "If set to True, returns log probabilities of the most likely tokens. \nUseful for advanced token probability analysis. \n\n**Example**: True to get detailed token likelihood information"}, "top_logprobs": {"type": "integer", "maximum": 20, "minimum": 0, "description": "Number of top log probabilities to return with each token. \nMust be between 0 and 20. \n\n**Example**: 5 to get top 5 most likely tokens for each position"}, "max_completion_tokens": {"type": "integer", "minimum": 1, "description": "Maximum number of tokens to generate in the completion. \nMust be at least 1. 
\n\n**Example**: 150 to limit response to approximately 100-150 words"}, "n": {"type": "integer", "minimum": 1, "description": "Number of chat completion choices to generate.\n\n **Example**: 3 to generate multiple alternative responses"}, "modalities": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of supported input/output modalities for the chat. \n\n**Example**:\n```json\n{\n \"modalities\": [\"text\", \"image\", \"audio\"]\n}\n```"}, "prediction": {"type": "object", "additionalProperties": {}, "description": "Optional field for storing prediction-related information. \nFlexible dictionary to capture model's predictive metadata. \n\n**Example**:\n```json\n{\n \"prediction\": {\n \"confidence_score\": 0.85,\n \"top_prediction\": \"response_category\"\n }\n}\n```"}, "audio": {"type": "object", "additionalProperties": {}, "description": "Optional dictionary for audio-related parameters or metadata. \n\n**Example**:\n```json\n{\n \"audio\": {\n \"language\": \"en-US\",\n \"transcription_format\": \"srt\"\n }\n}\n```"}, "presence_penalty": {"type": "number", "format": "double", "maximum": 2.0, "minimum": -2.0, "description": "Adjusts likelihood of discussing new topics by penalizing existing tokens. \nRanges from -2.0 to 2.0. \n\n**Values**:\n- Positive values: Encourage more diverse topics\n- Negative values: Keep discussion more focused\n- 0.0: Default behavior\n\n**Example**: 1.0 to promote topic diversity"}, "response_format": {"type": "object", "additionalProperties": {}, "description": "Specify the desired response format for the completion. \n\n**Example**:\n```json\n{\n \"response_format\": {\n \"type\": \"json_object\",\n \"schema\": {...}\n }\n}\n```"}, "seed": {"type": "integer", "description": "Set a seed for deterministic sampling to reproduce consistent results. 
\n\n**Example**: 42 for a reproducible random generation process"}, "service_tier": {"allOf": [{"$ref": "#/components/schemas/ServiceTierEnum"}], "description": "Select the service tier for the API request. \n\n**Choices**:\n- 'auto': Automatically select appropriate tier\n- 'default': Use default service configuration\n\n* `auto` - auto\n* `default` - default"}, "stop": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of strings that will cause the model to stop generating. \n\n**Example**:\n```json\n{\n \"stop\": [\"\\n\", \"Human:\", \"AI:\"]\n}\n```"}, "stream": {"type": "boolean", "default": false, "description": "If True, returns tokens as they are generated in a streaming format. \nDefault is False. \n\n**Example**: True for real-time token streaming"}, "stream_options": {"type": "object", "additionalProperties": {}, "description": "Additional configuration for streaming responses. \n\n**Example**:\n```json\n{\n \"stream_options\": {\n \"include_usage\": true\n }\n}\n```"}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "description": "Controls randomness in token selection. \nRanges from 0.0 to 2.0. \n\n**Values**:\n- 0.0: Most deterministic, focused responses\n- 1.0: Balanced randomness\n- 2.0: Most creative, unpredictable responses\n\n**Example**: 0.7 for a good balance of creativity and focus"}, "top_p": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "description": "Nucleus sampling threshold for token selection. \nRanges from 0.0 to 1.0. Default is 1.0. \n\n**Values**:\n- 1.0: Consider all tokens\n- Lower values: More focused, deterministic sampling\n\n**Example**: 0.9 to select from top 90% most probable tokens"}, "tools": {"type": "array", "items": {}, "description": "List of tools or function definitions available to the model. 
\n\n**Example**:\n```json\n{\n \"tools\": [\n {\n \"type\": \"function\",\n \"function\": {\n \"name\": \"get_weather\",\n \"description\": \"Retrieve current weather\"\n }\n }\n ]\n}\n```"}, "tool_choice": {"type": "string", "minLength": 1, "description": "Specify how tools should be used in the completion. \n\n**Example values**:\n- 'auto': Model decides when to use tools\n- 'none': Disable tool usage\n- Specific tool name to always use a particular tool"}, "parallel_tool_calls": {"type": "boolean", "description": "Allow the model to make multiple tool calls in parallel. \n\n**Example**: True to enable concurrent tool invocations"}, "user": {"type": "string", "minLength": 1, "description": "Optional identifier for the end-user to help track and monitor API usage. \n\n**Example**: 'user_123456'"}, "function_call": {"type": "string", "minLength": 1, "description": "Control how function calls are handled. \n\n**Example values**:\n- 'auto': Default behavior\n- 'none': Disable function calls\n- Specific function name to force its execution"}, "functions": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "List of function definitions available to the model. \n\n**Example**:\n```json\n{\n \"functions\": [\n {\n \"name\": \"get_current_weather\",\n \"description\": \"Get the current weather for a location\",\n \"parameters\": {...}\n }\n ]\n}\n```"}, "thinking": {"allOf": [{"$ref": "#/components/schemas/ThinkingRequest"}], "description": "Configuration for enabling Claude's extended thinking. When enabled, responses include thinking content blocks showing Claude's thinking process before the final answer. Requires a minimum budget of 1,024 tokens and counts towards your max_tokens limit.\n\n**Example**:\n```json\n{\n 'thinking': {\n 'type': 'enabled'\n 'budget_tokens': '1024' }\n}\n```"}, "web_search_options": {"type": "object", "additionalProperties": {}, "description": "Options for web search integration. 
\n **Example**:\n ```json\n web_search_options={\n \"search_context_size\": \"medium\" # Options: \"low\", \"medium\", \"high\"\n }\n ```"}}, "required": ["messages", "model"]}, "multimodalchatChatDataClass": {"properties": {"generated_text": {"title": "Generated Text", "type": "string"}, "messages": {"items": {"$ref": "#/components/schemas/ChatMessageDataClass"}, "title": "Messages", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["generated_text", "status"], "title": "multimodalchatChatDataClass", "type": "object"}, "multimodalchatChatRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "messages": {"type": "array", "items": {"type": "object", "additionalProperties": {}}, "description": "A list containing all the conversations between the user and the assistant. Each item in the list should be a dictionary with two keys: 'role' and 'message'.\n\n**role**: Specifies the role of the speaker and can have the values 'user' or 'assistant'.\n\n**message**: A list of dictionaries. Each dictionary in the 'message' list must contain the keys 'type' and 'content'.\n\n#### Structure\n- **type**: Specifies the type of content and can be 'media_url', 'media_base64', or 'text'.\n- **content**: A dictionary with the actual content based on the 'type':\n - If 'type' is 'media_url', 'content' must contain 'media_url' and must not contain 'media_base64' or 'text'.\n - If 'type' is 'media_base64', 'content' must contain 'media_base64' and must not contain 'media_url' or 'text'.\n - If 'type' is 'text', 'content' must contain 'text' and must not contain 'media_url' or 'media_base64'.\n\n#### Example\n```json\n[\n {\n 'role': 'user',\n 'content': [\n {\n 'type': 'text',\n 'content': {'text': 'Describe this image'}\n },\n {\n 'type': 'media_url',\n 'content': {\n 'media_url': 'https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg',\n 'media_type': 'image/jpeg'}\n },\n ]\n }\n]\n```"}, "chatbot_global_action": {"type": "string", "nullable": true, "description": "A system message that helps set the behavior of the assistant. For example, 'You are a helpful assistant'."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Controls the creativity of the model's responses. Higher values (up to 2) make the output more random, while lower values make it more focused and deterministic. 
A value of 0 (argmax sampling) is useful for scenarios requiring precise answers."}, "max_tokens": {"type": "integer", "maximum": 300000, "minimum": 1, "default": 2048, "description": "The maximum number of tokens to generate in the completion. This value, combined with the token count of your prompt, cannot exceed the model's context length."}, "stop_sequences": {"type": "array", "items": {"type": "string"}, "default": [], "description": "A list of sequences where the model will stop generating further tokens. Useful for controlling response length and format."}, "top_k": {"type": "integer", "maximum": 500, "minimum": 0, "description": "Limits the sampling pool to the top K options for each token. Setting this to a lower value can make the output more focused and deterministic."}, "top_p": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "description": "Enables nucleus sampling, where the model considers the smallest number of tokens whose cumulative probability is at least top_p. This allows for a dynamic selection of tokens based on probability, offering a balance between focus and creativity."}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. 
\n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}}, "required": ["messages", "providers"]}, "multimodalchatResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/multimodalchatChatDataClass"}], "default": null}}, "title": "multimodalchatResponseModel", "type": "object"}, "ocranonymization_asyncAnonymizationAsyncDataClass": {"properties": {"document": {"title": "Document", "type": "string"}, "document_url": {"title": "Document Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["document", "document_url", "id", "final_status"], "title": "ocranonymization_asyncAnonymizationAsyncDataClass", "type": "object"}, "ocranonymization_asyncModel": {"properties": {"base64": {"allOf": [{"$ref": 
"#/components/schemas/ocranonymization_asyncAnonymizationAsyncDataClass"}], "default": null}, "privateai": {"allOf": [{"$ref": "#/components/schemas/ocranonymization_asyncAnonymizationAsyncDataClass"}], "default": null}, "readyredact": {"allOf": [{"$ref": "#/components/schemas/ocranonymization_asyncAnonymizationAsyncDataClass"}], "default": null}}, "title": "ocranonymization_asyncModel", "type": "object"}, "ocrbank_check_parsingBankCheckParsingDataClass": {"properties": {"extracted_data": {"items": {"$ref": "#/components/schemas/ItemBankCheckParsingDataClass"}, "title": "Extracted Data", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocrbank_check_parsingBankCheckParsingDataClass", "type": "object"}, "ocrbank_check_parsingBankCheckParsingRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocrbank_check_parsingResponseModel": {"properties": {"base64": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}, "veryfi": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}, "mindee": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrbank_check_parsingBankCheckParsingDataClass"}], "default": null}}, "title": "ocrbank_check_parsingResponseModel", "type": "object"}, "ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/CustomDocumentParsingAsyncItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass", "type": "object"}, 
"ocrcustom_document_parsing_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrcustom_document_parsing_asyncCustomDocumentParsingAsyncDataClass"}], "default": null}}, "title": "ocrcustom_document_parsing_asyncModel", "type": "object"}, "ocrdata_extractionDataExtractionDataClass": {"properties": {"fields": {"items": {"$ref": "#/components/schemas/ItemDataExtraction"}, "title": "Fields", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocrdata_extractionDataExtractionDataClass", "type": "object"}, "ocrdata_extractionDataExtractionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocrdata_extractionResponseModel": {"properties": {"base64": {"allOf": [{"$ref": "#/components/schemas/ocrdata_extractionDataExtractionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocrdata_extractionDataExtractionDataClass"}], "default": null}}, "title": "ocrdata_extractionResponseModel", "type": "object"}, "ocrfinancial_parserFinancialParserDataClass": {"properties": {"extracted_data": {"description": "List of parsed financial data objects (per page).", "items": {"$ref": "#/components/schemas/FinancialParserObjectDataClass"}, "title": "Extracted Data", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocrfinancial_parserFinancialParserDataClass", "type": "object"}, "ocrfinancial_parserFinancialParserRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "language": {"type": "string", "nullable": true, "description": "Language code of the language the document is written in (ex: fr (French), en (English), es (Spanish))"}, "document_type": {"allOf": [{"$ref": "#/components/schemas/DocumentTypeEnum"}], "default": "invoice", "description": "Specify the type of your document. Can be Set to 'auto-detect' for automatic detection if the provider supports it. Otherwise, the default is 'invoice'.\n\n* `auto-detect` - auto-detect\n* `invoice` - invoice\n* `receipt` - receipt"}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocrfinancial_parserResponseModel": {"properties": {"base64": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "klippa": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "dataleon": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "veryfi": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "eagledoc": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "mindee": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], 
"default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "tabscanner": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "affinda": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrfinancial_parserFinancialParserDataClass"}], "default": null}}, "title": "ocrfinancial_parserResponseModel", "type": "object"}, "ocridentity_parserIdentityParserDataClass": {"properties": {"extracted_data": {"items": {"$ref": "#/components/schemas/InfosIdentityParserDataClass"}, "title": "Extracted Data", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "ocridentity_parserIdentityParserDataClass", "type": "object"}, "ocridentity_parserIdentityParserRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "ocridentity_parserResponseModel": {"properties": {"base64": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "mindee": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "klippa": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}, "affinda": {"allOf": [{"$ref": "#/components/schemas/ocridentity_parserIdentityParserDataClass"}], "default": null}}, "title": "ocridentity_parserResponseModel", "type": "object"}, "ocrocrOcrDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "bounding_boxes": {"items": {"$ref": "#/components/schemas/Bounding_box"}, "title": "Bounding Boxes", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": 
"#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "ocrocrOcrDataClass", "type": "object"}, "ocrocrOcrRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "language": {"type": "string", "nullable": true, "description": "Language code of the language the document is written in (ex: fr (French), en (English), es (Spanish))"}}, "required": ["providers"]}, "ocrocrResponseModel": {"properties": {"base64": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "sentisight": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "api4ai": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "clarifai": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrocrOcrDataClass"}], "default": null}}, "title": "ocrocrResponseModel", "type": "object"}, "ocrocr_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrocr_asyncOcrAsyncDataClass"}], "default": null}}, "title": "ocrocr_asyncModel", 
"type": "object"}, "ocrocr_asyncOcrAsyncDataClass": {"properties": {"raw_text": {"title": "Raw Text", "type": "string"}, "pages": {"description": "List of pages", "items": {"$ref": "#/components/schemas/Page"}, "title": "Pages", "type": "array"}, "number_of_pages": {"description": "Number of pages in the document", "title": "Number Of Pages", "type": "integer"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["raw_text", "number_of_pages", "id", "final_status"], "title": "ocrocr_asyncOcrAsyncDataClass", "type": "object"}, "ocrocr_tables_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/ocrocr_tables_asyncOcrTablesAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/ocrocr_tables_asyncOcrTablesAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/ocrocr_tables_asyncOcrTablesAsyncDataClass"}], "default": null}}, "title": "ocrocr_tables_asyncModel", "type": "object"}, "ocrocr_tables_asyncOcrTablesAsyncDataClass": {"properties": {"pages": {"items": {"$ref": "#/components/schemas/Page"}, "title": "Pages", "type": "array"}, "num_pages": {"title": "Num Pages", "type": "integer"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, 
"title": "Error", "type": "object"}}, "required": ["num_pages", "id", "final_status"], "title": "ocrocr_tables_asyncOcrTablesAsyncDataClass", "type": "object"}, "ocrresume_parserResponseModel": {"properties": {"senseloaf": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "hireability": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "extracta": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "klippa": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}, "affinda": {"allOf": [{"$ref": "#/components/schemas/ocrresume_parserResumeParserDataClass"}], "default": null}}, "title": "ocrresume_parserResponseModel", "type": "object"}, "ocrresume_parserResumeParserDataClass": {"properties": {"extracted_data": {"$ref": "#/components/schemas/ResumeExtractedData"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["extracted_data", "status"], "title": "ocrresume_parserResumeParserDataClass", "type": "object"}, "ocrresume_parserResumeParserRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [\"y1\", \"y2\", ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "convert_to_pdf": {"type": "boolean", "nullable": true, "default": false, "description": "Boolean value to specify weather to convert the doc/docx files to pdf format to be accepted by a majority of the providers"}}, "required": ["providers"]}, "textai_detectionAiDetectionDataClass": {"properties": {"ai_score": {"title": "Ai Score", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/AiDetectionItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["ai_score", "status"], "title": "textai_detectionAiDetectionDataClass", "type": "object"}, "textai_detectionAiDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "provider_params": {"type": "string", "description": "\nParameters specific to the provider that you want to send along the request.\n\nit should take a *provider* name as key and an object of parameters as value.\n\nExample:\n\n {\n \"deepgram\": {\n \"filler_words\": true,\n \"smart_format\": true,\n \"callback\": \"https://webhook.site/0000\"\n },\n \"assembly\": {\n \"webhook_url\": \"https://webhook.site/0000\"\n }\n }\n\nPlease refer to the documentation of each provider to see which parameters to send.\n"}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}}, "required": ["providers", "text"]}, "textai_detectionResponseModel": {"properties": {"winstonai": {"allOf": [{"$ref": "#/components/schemas/textai_detectionAiDetectionDataClass"}], "default": null}, "sapling": {"allOf": [{"$ref": "#/components/schemas/textai_detectionAiDetectionDataClass"}], "default": null}, "originalityai": {"allOf": [{"$ref": "#/components/schemas/textai_detectionAiDetectionDataClass"}], "default": null}}, "title": "textai_detectionResponseModel", "type": "object"}, "textanonymizationAnonymizationDataClass": {"properties": {"result": {"title": "Result", "type": "string"}, "entities": {"items": {"$ref": "#/components/schemas/AnonymizationEntity"}, "title": "Entities", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["result", "status"], "title": "textanonymizationAnonymizationDataClass", "type": "object"}, "textanonymizationResponseModel": {"properties": {"privateai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": 
null}, "emvista": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textanonymizationAnonymizationDataClass"}], "default": null}}, "title": "textanonymizationResponseModel", "type": "object"}, "textchatChatDataClass": {"properties": {"generated_text": {"title": "Generated Text", "type": "string"}, "message": {"items": {"$ref": "#/components/schemas/ChatMessageDataClass"}, "title": "Message", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["generated_text", "status"], "title": "textchatChatDataClass", "type": "object"}, "textchatChatRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "nullable": true, "default": "", "description": "Start your conversation here..."}, "chatbot_global_action": {"type": "string", "nullable": true, "default": "", "description": "A system message that helps set the behavior of the assistant. For example, 'You are a helpful assistant'."}, "previous_history": {"type": "array", "items": {"$ref": "#/components/schemas/ChatMessageRequest"}, "description": "A list containing all the previous conversations between the user and the chatbot AI. Each item in the list should be a dictionary with two keys: 'role' and 'message'. The 'role' key specifies the role of the speaker and can have the values 'user' or 'assistant'. The 'message' key contains the text of the conversation from the respective role. For example: [{'role': 'user', 'message': 'Hello'}, {'role': 'assistant', 'message': 'Hi, how can I help you?'}, ...]. This format allows easy identification of the speaker's role and their corresponding message."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 4096, "description": "The maximum number of tokens to generate in the completion. 
The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "tool_choice": {"allOf": [{"$ref": "#/components/schemas/ToolChoiceEnum"}], "default": "auto", "description": "`auto`: the model will choose to use tools if needed, `required`: force model to use any of the available tools, `none`: force model to not select a tool\n\n* `auto` - auto\n* `required` - required\n* `none` - none"}, "available_tools": {"type": "array", "items": {"$ref": "#/components/schemas/ChatAvailableToolsRequest"}, "description": "A list of tools the model may generate the right arguments for."}, "tool_results": {"type": "array", "items": {"$ref": "#/components/schemas/ChatToolResultRequest"}, "description": "List of results obtained from applying the tool_call arguments to your own tool."}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. \n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}}, "required": ["providers"]}, "textchatChatStreamRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "nullable": true, "default": "", "description": "Start your conversation here..."}, "chatbot_global_action": {"type": "string", "nullable": true, "default": "", "description": "A system message that helps set the behavior of the assistant. For example, 'You are a helpful assistant'."}, "previous_history": {"type": "array", "items": {"$ref": "#/components/schemas/ChatMessageRequest"}, "description": "A list containing all the previous conversations between the user and the chatbot AI. Each item in the list should be a dictionary with two keys: 'role' and 'message'. The 'role' key specifies the role of the speaker and can have the values 'user' or 'assistant'. The 'message' key contains the text of the conversation from the respective role. For example: [{'role': 'user', 'message': 'Hello'}, {'role': 'assistant', 'message': 'Hi, how can I help you?'}, ...]. This format allows easy identification of the speaker's role and their corresponding message."}, "temperature": {"type": "number", "format": "double", "maximum": 2, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 4096, "description": "The maximum number of tokens to generate in the completion. 
The token count of your prompt plus max_tokens cannot exceed the model's context length."}, "tool_choice": {"allOf": [{"$ref": "#/components/schemas/ToolChoiceEnum"}], "default": "auto", "description": "`auto`: the model will choose to use tools if needed, `required`: force model to use any of the available tools, `none`: force model to not select a tool\n\n* `auto` - auto\n* `required` - required\n* `none` - none"}, "available_tools": {"type": "array", "items": {"$ref": "#/components/schemas/ChatAvailableToolsRequest"}, "description": "A list of tools the model may generate the right arguments for."}, "tool_results": {"type": "array", "items": {"$ref": "#/components/schemas/ChatToolResultRequest"}, "description": "List of results obtained from applying the tool_call arguments to your own tool."}, "reasoning_effort": {"allOf": [{"$ref": "#/components/schemas/ReasoningEffortEnum"}], "description": "Optional parameter to control the model's reasoning depth. \nAllows specifying the level of analytical effort in generating responses. 
\n\n**Choices**:\n- 'low': Minimal reasoning, quick responses\n- 'medium': Balanced reasoning approach\n- 'high': In-depth, comprehensive reasoning\n\n**Example**: 'high' for complex problem-solving tasks\n\n* `low` - low\n* `medium` - medium\n* `high` - high"}, "fallback_type": {"allOf": [{"$ref": "#/components/schemas/FallbackTypeEnum"}], "default": "continue"}}, "required": ["providers"]}, "textchatResponseModel": {"properties": {"deepseek": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "groq": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "perplexityai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "replicate": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "together_ai": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "meta": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textchatChatDataClass"}], "default": null}}, "title": "textchatResponseModel", "type": "object"}, "textcode_generationCodeGenerationDataClass": {"properties": {"generated_text": {"title": "Generated Text", "type": "string"}, "original_response": 
{"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["generated_text", "status"], "title": "textcode_generationCodeGenerationDataClass", "type": "object"}, "textcode_generationCodeGenerationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "prompt": {"type": "string", "nullable": true, "default": "", "description": "Entrer the source code that will be used as a context."}, "instruction": {"type": "string", "minLength": 1, "description": "Entrer the instruction you want to be followed."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "minimum": 1, "default": 1000, "description": "The maximum number of tokens to generate in the completion. The token count of your prompt plus max_tokens cannot exceed the model's context length."}}, "required": ["instruction", "providers"]}, "textcode_generationResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/textcode_generationCodeGenerationDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textcode_generationCodeGenerationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textcode_generationCodeGenerationDataClass"}], "default": null}}, "title": "textcode_generationResponseModel", "type": "object"}, "textembeddingsEmbeddingsDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/EmbeddingDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "textembeddingsEmbeddingsDataClass", "type": "object"}, "textembeddingsEmbeddingsRequest": {"type": "object", "properties": {"settings": 
{"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "texts": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "List of texts to transform into embeddings."}, "dimensions": {"type": "integer", "minimum": 1, "nullable": true, "description": " parameter to control the size of the output embedding vecto"}}, "required": ["providers", "texts"]}, "textembeddingsResponseModel": {"properties": {"iointelligence": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "ai21labs": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "jina": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "mistral": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textembeddingsEmbeddingsDataClass"}], "default": null}}, "title": "textembeddingsResponseModel", "type": "object"}, "textemotion_detectionEmotionDetectionDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/EmotionItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "textemotion_detectionEmotionDetectionDataClass", "type": "object"}, "textemotion_detectionEmotionDetectionRequest": {"type": "object", 
"properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}}, "required": ["providers", "text"]}, "textemotion_detectionResponseModel": {"properties": {"vernai": {"allOf": [{"$ref": "#/components/schemas/textemotion_detectionEmotionDetectionDataClass"}], "default": null}}, "title": "textemotion_detectionResponseModel", "type": "object"}, "textentity_sentimentEntitySentimentDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/Entity"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["items", "status"], "title": "textentity_sentimentEntitySentimentDataClass", "type": "object"}, "textentity_sentimentEntitySentimentRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is a list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}}, "required": ["providers", "text"]}, "textentity_sentimentResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/textentity_sentimentEntitySentimentDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textentity_sentimentEntitySentimentDataClass"}], "default": null}}, "title": "textentity_sentimentResponseModel", "type": "object"}, "textkeyword_extractionKeywordExtractionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/InfosKeywordExtractionDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "textkeyword_extractionKeywordExtractionDataClass", "type": "object"}, "textkeyword_extractionResponseModel": {"properties": {"corticalio": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "emvista": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, 
"oneai": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textkeyword_extractionKeywordExtractionDataClass"}], "default": null}}, "title": "textkeyword_extractionResponseModel", "type": "object"}, "textmoderationModerationDataClass": {"properties": {"nsfw_likelihood": {"title": "Nsfw Likelihood", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/TextModerationItem"}, "title": "Items", "type": "array"}, "nsfw_likelihood_score": {"title": "Nsfw Likelihood Score", "type": "integer"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["nsfw_likelihood", "nsfw_likelihood_score", "status"], "title": "textmoderationModerationDataClass", "type": "object"}, "textmoderationResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/textmoderationModerationDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textmoderationModerationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textmoderationModerationDataClass"}], "default": null}}, "title": "textmoderationResponseModel", "type": "object"}, "textnamed_entity_recognitionNamedEntityRecognitionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/InfosNamedEntityRecognitionDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the 
`show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "textnamed_entity_recognitionNamedEntityRecognitionDataClass", "type": "object"}, "textnamed_entity_recognitionResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textnamed_entity_recognitionNamedEntityRecognitionDataClass"}], "default": null}}, "title": "textnamed_entity_recognitionResponseModel", "type": "object"}, "textplagia_detectionPlagiaDetectionDataClass": {"properties": {"plagia_score": {"title": "Plagia Score", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/PlagiaDetectionItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["plagia_score", "status"], "title": 
"textplagia_detectionPlagiaDetectionDataClass", "type": "object"}, "textplagia_detectionPlagiaDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "A text content on which a plagiarism detection analysis will be run"}, "title": {"type": "string", "nullable": true, "default": "", "description": "Content title"}}, "required": ["providers", "text"]}, "textplagia_detectionResponseModel": {"properties": {"winstonai": {"allOf": [{"$ref": "#/components/schemas/textplagia_detectionPlagiaDetectionDataClass"}], "default": null}, "originalityai": {"allOf": [{"$ref": "#/components/schemas/textplagia_detectionPlagiaDetectionDataClass"}], "default": null}}, "title": "textplagia_detectionResponseModel", "type": "object"}, "textprompt_optimizationPromptOptimizationDataClass": {"properties": {"missing_information": {"title": "Missing Information", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/PromptDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["missing_information", "status"], "title": "textprompt_optimizationPromptOptimizationDataClass", "type": "object"}, "textprompt_optimizationPromptOptimizationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Description of the desired prompt."}, "target_provider": {"type": "string", "minLength": 1, "description": "Select the provider for the prompt optimization"}}, "required": ["providers", "target_provider", "text"]}, "textprompt_optimizationResponseModel": {"properties": {"openai": {"allOf": [{"$ref": "#/components/schemas/textprompt_optimizationPromptOptimizationDataClass"}], "default": null}}, "title": "textprompt_optimizationResponseModel", "type": "object"}, "textsentiment_analysisResponseModel": {"properties": {"emvista": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "ibm": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "sapling": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "tenstorrent": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/textsentiment_analysisSentimentAnalysisDataClass"}], "default": null}}, "title": "textsentiment_analysisResponseModel", "type": "object"}, 
"textsentiment_analysisSentimentAnalysisDataClass": {"properties": {"general_sentiment": {"allOf": [{"$ref": "#/components/schemas/GeneralSentimentEnum"}], "title": "General Sentiment"}, "general_sentiment_rate": {"maximum": 1.0, "minimum": 0.0, "title": "General Sentiment Rate", "type": "integer"}, "items": {"items": {"$ref": "#/components/schemas/SegmentSentimentAnalysisDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["general_sentiment", "general_sentiment_rate", "status"], "title": "textsentiment_analysisSentimentAnalysisDataClass", "type": "object"}, "textspell_checkResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "sapling": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "prowritingaid": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textspell_checkSpellCheckDataClass"}], "default": null}}, "title": "textspell_checkResponseModel", "type": "object"}, "textspell_checkSpellCheckDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "items": {"items": {"$ref": "#/components/schemas/SpellCheckItem"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing 
the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "textspell_checkSpellCheckDataClass", "type": "object"}, "textspell_checkSpellCheckRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}}, "required": ["providers", "text"]}, "textsummarizeResponseModel": {"properties": {"emvista": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "alephalpha": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "writesonic": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "anthropic": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "meaningcloud": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "cohere": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/textsummarizeSummarizeDataClass"}], "default": null}}, "title": "textsummarizeResponseModel", "type": "object"}, "textsummarizeSummarizeDataClass": {"properties": {"result": {"title": "Result", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["result", "status"], "title": 
"textsummarizeSummarizeDataClass", "type": "object"}, "textsummarizeSummarizeRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}, "output_sentences": {"type": "integer", "minimum": 1, "default": 1}}, "required": ["providers", "text"]}, "texttopic_extractionResponseModel": {"properties": {"tenstorrent": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/texttopic_extractionTopicExtractionDataClass"}], "default": null}}, "title": "texttopic_extractionResponseModel", "type": "object"}, "texttopic_extractionTopicExtractionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/ExtractedTopic"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "texttopic_extractionTopicExtractionDataClass", "type": "object"}, "texttopic_extractiontextanonymizationtextmoderationtextnamed_entity_recognitiontextkeyword_extractiontextsyntax_analysistextsentiment_analysisTextAnalysisRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "language": {"type": "string", "nullable": true, "description": "Language code for the language the input text is written in (eg: en, fr)."}}, "required": ["providers", "text"]}, "translationautomatic_translationAutomaticTranslationDataClass": {"properties": {"text": {"title": "Text", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["text", "status"], "title": "translationautomatic_translationAutomaticTranslationDataClass", "type": "object"}, "translationautomatic_translationAutomaticTranslationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}, "source_language": {"type": "string", "nullable": true, "description": "Source language code (ex: en, fr)"}, "target_language": {"type": "string", "minLength": 1, "description": "Target language code (ex: en, fr)"}}, "required": ["providers", "target_language", "text"]}, "translationautomatic_translationResponseModel": {"properties": {"deepl": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "xai": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "modernmt": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/translationautomatic_translationAutomaticTranslationDataClass"}], "default": null}}, "title": "translationautomatic_translationResponseModel", "type": "object"}, "translationdocument_translationDocumentTranslationDataClass": {"properties": {"file": {"title": "File", "type": "string"}, "document_resource_url": {"title": "Document Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": 
[{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["file", "document_resource_url", "status"], "title": "translationdocument_translationDocumentTranslationDataClass", "type": "object"}, "translationdocument_translationDocumentTranslationRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter. but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop to the first provider who doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with *content-type*: **application/json**."}, "file_password": {"type": "string", "nullable": true, "description": "If your PDF file has a password, you can pass it here!", "maxLength": 200}, "source_language": {"type": "string", "nullable": true, "description": "Source language code (ex: en, fr)"}, "target_language": {"type": "string", "minLength": 1, "description": "Target language code (ex: en, fr)"}}, "required": ["providers", "target_language"]}, "translationdocument_translationResponseModel": {"properties": {"deepl": {"allOf": [{"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/translationdocument_translationDocumentTranslationDataClass"}], "default": null}}, "title": "translationdocument_translationResponseModel", "type": "object"}, "translationlanguage_detectionLanguageDetectionDataClass": {"properties": {"items": {"items": {"$ref": "#/components/schemas/InfosLanguageDetectionDataClass"}, "title": "Items", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["status"], "title": "translationlanguage_detectionLanguageDetectionDataClass", "type": "object"}, "translationlanguage_detectionLanguageDetectionRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionary or a JSON object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "text": {"type": "string", "minLength": 1, "description": "Text to analyze"}}, "required": ["providers", "text"]}, "translationlanguage_detectionResponseModel": {"properties": {"xai": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "amazon": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "oneai": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "modernmt": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/translationlanguage_detectionLanguageDetectionDataClass"}], "default": null}}, "title": "translationlanguage_detectionResponseModel", "type": "object"}, "videodeepfake_detection_asyncDeepfakeDetectionAsyncDataClass": {"properties": {"average_score": {"maximum": 1.0, "minimum": 0.0, "title": "Average Score", "type": "integer"}, "prediction": {"allOf": [{"$ref": "#/components/schemas/PredictionB20Enum"}], "title": "Prediction"}, "details_per_frame": {"items": {"$ref": "#/components/schemas/DetailPerFrame"}, "title": "Details Per Frame", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": 
"#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["average_score", "prediction", "id", "final_status"], "title": "videodeepfake_detection_asyncDeepfakeDetectionAsyncDataClass", "type": "object"}, "videodeepfake_detection_asyncModel": {"properties": {"sightengine": {"allOf": [{"$ref": "#/components/schemas/videodeepfake_detection_asyncDeepfakeDetectionAsyncDataClass"}], "default": null}}, "title": "videodeepfake_detection_asyncModel", "type": "object"}, "videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass": {"properties": {"moderation": {"items": {"$ref": "#/components/schemas/ContentNSFW"}, "title": "Moderation", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass", "type": "object"}, "videoexplicit_content_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoexplicit_content_detection_asyncExplicitContentDetectionAsyncDataClass"}], "default": null}}, "title": "videoexplicit_content_detection_asyncModel", "type": "object"}, "videoface_detection_asyncFaceDetectionAsyncDataClass": {"properties": {"faces": {"items": {"$ref": "#/components/schemas/VideoFace"}, "title": "Faces", "type": "array"}, "original_response": {"default": null, 
"description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoface_detection_asyncFaceDetectionAsyncDataClass", "type": "object"}, "videoface_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoface_detection_asyncFaceDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoface_detection_asyncFaceDetectionAsyncDataClass"}], "default": null}}, "title": "videoface_detection_asyncModel", "type": "object"}, "videogeneration_asyncGenerationAsyncDataClass": {"properties": {"video": {"title": "Video", "type": "string"}, "video_resource_url": {"title": "Video Resource Url", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["video", "video_resource_url", "id", "final_status"], "title": "videogeneration_asyncGenerationAsyncDataClass", "type": "object"}, "videogeneration_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "openai": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "bytedance": {"allOf": [{"$ref": 
"#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "minimax": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "microsoft": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videogeneration_asyncGenerationAsyncDataClass"}], "default": null}}, "title": "videogeneration_asyncModel", "type": "object"}, "videolabel_detection_asyncLabelDetectionAsyncDataClass": {"properties": {"labels": {"items": {"$ref": "#/components/schemas/VideoLabel"}, "title": "Labels", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videolabel_detection_asyncLabelDetectionAsyncDataClass", "type": "object"}, "videolabel_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videolabel_detection_asyncLabelDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videolabel_detection_asyncLabelDetectionAsyncDataClass"}], "default": null}}, "title": "videolabel_detection_asyncModel", "type": "object"}, "videologo_detection_asyncLogoDetectionAsyncDataClass": {"properties": {"logos": {"items": {"$ref": "#/components/schemas/LogoTrack"}, "title": "Logos", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": 
"Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videologo_detection_asyncLogoDetectionAsyncDataClass", "type": "object"}, "videologo_detection_asyncModel": {"properties": {"twelvelabs": {"allOf": [{"$ref": "#/components/schemas/videologo_detection_asyncLogoDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videologo_detection_asyncLogoDetectionAsyncDataClass"}], "default": null}}, "title": "videologo_detection_asyncModel", "type": "object"}, "videoobject_tracking_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videoobject_tracking_asyncObjectTrackingAsyncDataClass"}], "default": null}}, "title": "videoobject_tracking_asyncModel", "type": "object"}, "videoobject_tracking_asyncObjectTrackingAsyncDataClass": {"properties": {"objects": {"items": {"$ref": "#/components/schemas/ObjectTrack"}, "title": "Objects", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoobject_tracking_asyncObjectTrackingAsyncDataClass", "type": "object"}, "videoperson_tracking_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoperson_tracking_asyncPersonTrackingAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoperson_tracking_asyncPersonTrackingAsyncDataClass"}], 
"default": null}}, "title": "videoperson_tracking_asyncModel", "type": "object"}, "videoperson_tracking_asyncPersonTrackingAsyncDataClass": {"properties": {"persons": {"items": {"$ref": "#/components/schemas/VideoTrackingPerson"}, "title": "Persons", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoperson_tracking_asyncPersonTrackingAsyncDataClass", "type": "object"}, "videoquestion_answerQuestionAnswerDataClass": {"properties": {"answer": {"title": "Answer", "type": "string"}, "finish_reason": {"title": "Finish Reason", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "status": {"allOf": [{"$ref": "#/components/schemas/Status549Enum"}], "title": "Status"}}, "required": ["answer", "finish_reason", "status"], "title": "videoquestion_answerQuestionAnswerDataClass", "type": "object"}, "videoquestion_answerQuestionAnswerRequest": {"type": "object", "properties": {"settings": {"type": "string", "default": {}, "description": "A dictionnary or a json object to specify specific models to use for some providers.
It can be in the following format: {\"google\" : \"google_model\", \"ibm\": \"ibm_model\"...}.\n "}, "providers": {"type": "array", "items": {"type": "string", "minLength": 1}, "description": "It can be one (ex: **'amazon'** or **'google'**) or multiple provider(s) (ex: **'amazon,microsoft,google'**) that the data will be redirected to in order to get the processed results.
Providers can also be invoked with specific models (ex: providers: **'amazon/model1, amazon/model2, google/model3'**)"}, "fallback_providers": {"type": "array", "items": {"type": "string"}, "default": [], "description": "Providers in this list will be used as fallback if the call to provider in `providers` parameter fails.\n To use this feature, you must input **only one** provider in the `providers` parameter, but you can put up to 5 fallbacks.\n\nThey will be tried in the same order they are input, and it will stop at the first provider that doesn't fail.\n\n\n*Doesn't work with async subfeatures.*\n ", "maxItems": 5}, "response_as_dict": {"type": "boolean", "default": true, "description": "Optional : When set to **true** (default), the response is an object of responses with providers names as keys :
\n ``` {\"google\" : { \"status\": \"success\", ... }, } ```
\n When set to **false** the response structure is a list of response objects :
\n ``` [{\"status\": \"success\", \"provider\": \"google\" ... }, ] ```.
\n "}, "attributes_as_list": {"type": "boolean", "default": false, "description": "Optional : When set to **false** (default) the structure of the extracted items is list of objects having different attributes :
\n ```{'items': [{\"attribute_1\": \"x1\",\"attribute_2\": \"y2\"}, ... ]}```
\n When it is set to **true**, the response contains an object with each attribute as a list :
\n ```{ \"attribute_1\": [\"x1\",\"x2\", ...], \"attribute_2\": [y1, y2, ...]}``` "}, "show_base_64": {"type": "boolean", "default": true}, "show_original_response": {"type": "boolean", "default": false, "description": "Optional : Shows the original response of the provider.
\n When set to **true**, a new attribute *original_response* will appear in the response object."}, "file": {"type": "string", "format": "binary", "description": "File to analyse in binary format to be used with *content-type*: **multipart/form-data**
**Does not work with application/json !**", "pattern": "(?:mp4|mpeg|mov|avi|x\\-flx|mpg|webm|wmv|3gpp)$"}, "file_url": {"type": "string", "format": "uri", "nullable": true, "description": "File **URL** to analyse to be used with with *content-type*: **application/json**."}, "temperature": {"type": "number", "format": "double", "maximum": 1, "minimum": 0, "default": 0.0, "description": "Higher values mean the model will take more risks and value 0 (argmax sampling) works better for scenarios with a well-defined answer."}, "max_tokens": {"type": "integer", "maximum": 3000000, "minimum": 1, "description": "The maximum number of tokens to generate in the completion. This value, combined with the token count of your prompt, cannot exceed the model's context length."}, "text": {"type": "string", "minLength": 1, "description": "Question about the video"}}, "required": ["providers", "text"]}, "videoquestion_answerResponseModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videoquestion_answerQuestionAnswerDataClass"}], "default": null}}, "title": "videoquestion_answerResponseModel", "type": "object"}, "videoquestion_answer_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videoquestion_answer_asyncQuestionAnswerAsyncDataClass"}], "default": null}}, "title": "videoquestion_answer_asyncModel", "type": "object"}, "videoquestion_answer_asyncQuestionAnswerAsyncDataClass": {"properties": {"answer": {"title": "Answer", "type": "string"}, "finish_reason": {"title": "Finish Reason", "type": "string"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": 
"#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["answer", "finish_reason", "id", "final_status"], "title": "videoquestion_answer_asyncQuestionAnswerAsyncDataClass", "type": "object"}, "videoshot_change_detection_asyncModel": {"properties": {"google": {"allOf": [{"$ref": "#/components/schemas/videoshot_change_detection_asyncShotChangeDetectionAsyncDataClass"}], "default": null}}, "title": "videoshot_change_detection_asyncModel", "type": "object"}, "videoshot_change_detection_asyncShotChangeDetectionAsyncDataClass": {"properties": {"shotAnnotations": {"items": {"$ref": "#/components/schemas/ShotFrame"}, "title": "Shotannotations", "type": "array"}, "original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videoshot_change_detection_asyncShotChangeDetectionAsyncDataClass", "type": "object"}, "videotext_detection_asyncModel": {"properties": {"amazon": {"allOf": [{"$ref": "#/components/schemas/videotext_detection_asyncTextDetectionAsyncDataClass"}], "default": null}, "twelvelabs": {"allOf": [{"$ref": "#/components/schemas/videotext_detection_asyncTextDetectionAsyncDataClass"}], "default": null}, "google": {"allOf": [{"$ref": "#/components/schemas/videotext_detection_asyncTextDetectionAsyncDataClass"}], "default": null}}, "title": "videotext_detection_asyncModel", "type": "object"}, "videotext_detection_asyncTextDetectionAsyncDataClass": {"properties": {"texts": {"items": {"$ref": "#/components/schemas/VideoText"}, "title": "Texts", "type": "array"}, 
"original_response": {"default": null, "description": "original response sent by the provider, hidden by default, show it by passing the `show_original_response` field to `true` in your request", "title": "Original Response"}, "id": {"title": "Id", "type": "string"}, "final_status": {"allOf": [{"$ref": "#/components/schemas/FinalStatusEnum"}], "title": "Final Status"}, "error": {"default": null, "title": "Error", "type": "object"}}, "required": ["id", "final_status"], "title": "videotext_detection_asyncTextDetectionAsyncDataClass", "type": "object"}}, "securitySchemes": {"FeatureApiAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}, "WorkflowShareApiAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}, "jwtAuth": {"type": "http", "scheme": "bearer", "bearerFormat": "JWT"}}}, "servers": [{"url": "https://api.edenai.run/v2"}], "security": [{"FeatureApiAuth": []}]} \ No newline at end of file