diff --git a/integrations/llms/ai21.mdx b/integrations/llms/ai21.mdx
index 1dd46e15..8aeaa4be 100644
--- a/integrations/llms/ai21.mdx
+++ b/integrations/llms/ai21.mdx
@@ -30,13 +30,13 @@ print(response.choices[0].message.content)
 ```
 
 ```js Javascript icon="square-js"
-    import Portkey from 'portkey-ai'
+import Portkey from 'portkey-ai'
 
 // 1. Install: npm install portkey-ai
 // 2. Add @ai21 provider in model catalog
 // 3. Use it:
-    const portkey = new Portkey({
+const portkey = new Portkey({
     apiKey: "PORTKEY_API_KEY"
 })
@@ -48,7 +48,7 @@ const response = await portkey.chat.completions.create({
 console.log(response.choices[0].message.content)
 ```
 
-```python OpenAI Py icon="openai"
+```python OpenAI Py icon="python"
 from openai import OpenAI
 from portkey_ai import PORTKEY_GATEWAY_URL
@@ -69,7 +69,7 @@ response = client.chat.completions.create(
 print(response.choices[0].message.content)
 ```
 
-```js OpenAI JS icon="openai"
+```js OpenAI JS icon="square-js"
 import OpenAI from "openai"
 import { PORTKEY_GATEWAY_URL } from "portkey-ai"
diff --git a/integrations/llms/anyscale-llama2-mistral-zephyr.mdx b/integrations/llms/anyscale-llama2-mistral-zephyr.mdx
index 041cf113..cdbe17d0 100644
--- a/integrations/llms/anyscale-llama2-mistral-zephyr.mdx
+++ b/integrations/llms/anyscale-llama2-mistral-zephyr.mdx
@@ -45,7 +45,7 @@ const response = await portkey.chat.completions.create({
 console.log(response.choices[0].message.content)
 ```
 
-```python OpenAI Py icon="openai"
+```python OpenAI Py icon="python"
 from openai import OpenAI
 from portkey_ai import PORTKEY_GATEWAY_URL
@@ -66,7 +66,7 @@ response = client.chat.completions.create(
 print(response.choices[0].message.content)
 ```
 
-```js OpenAI JS icon="openai"
+```js OpenAI JS icon="square-js"
 import OpenAI from "openai"
 import { PORTKEY_GATEWAY_URL } from "portkey-ai"
diff --git a/integrations/llms/fireworks/files.mdx b/integrations/llms/fireworks/files.mdx
index 0f3d3d39..cad533fa 100644
--- a/integrations/llms/fireworks/files.mdx
+++ b/integrations/llms/fireworks/files.mdx
@@ -53,7 +53,7 @@ await uploadFile();
 ```sh
-# you can also use a virtual key here
+# you can also use a provider from Model Catalog here
 curl --location 'https://api.portkey.ai/v1/files' \
 --header 'x-portkey-api-key: ' \
 --header 'x-portkey-provider: fireworks-ai' \
diff --git a/integrations/llms/google-palm.mdx b/integrations/llms/google-palm.mdx
index c225d153..295fafde 100644
--- a/integrations/llms/google-palm.mdx
+++ b/integrations/llms/google-palm.mdx
@@ -38,10 +38,10 @@ Set up Portkey with your virtual key as part of the initialization configuration
 
 ```js
-    import Portkey from 'portkey-ai'
+import Portkey from 'portkey-ai'
 
-    const portkey = new Portkey({
+const portkey = new Portkey({
     apiKey: "PORTKEY_API_KEY", // defaults to process.env["PORTKEY_API_KEY"]
@@ -75,7 +75,7 @@ Use the Portkey instance to send requests to Google Palm. You can also override
 
 ```js
-    const chatCompletion = await portkey.chat.completions.create({
+const chatCompletion = await portkey.chat.completions.create({
     messages: [{ role: 'user', content: 'Say this is a test' }],
diff --git a/integrations/llms/oracle.mdx b/integrations/llms/oracle.mdx
index 09b3b202..7b8cecd4 100644
--- a/integrations/llms/oracle.mdx
+++ b/integrations/llms/oracle.mdx
@@ -45,9 +45,9 @@ Add these credentials to Portkey to create your Oracle virtual key.
 
 ```js
-    import Portkey from 'portkey-ai'
+import Portkey from 'portkey-ai'
 
-    const portkey = new Portkey({
+const portkey = new Portkey({
     apiKey: "PORTKEY_API_KEY", // defaults to process.env["PORTKEY_API_KEY"]
     provider: "@ORACLE_PROVIDER" // Your Oracle OCI virtual key
 })
@@ -70,7 +70,7 @@ Add these credentials to Portkey to create your Oracle virtual key.
 
 ```js
-    const chatCompletion = await portkey.chat.completions.create({
+const chatCompletion = await portkey.chat.completions.create({
     messages: [{ role: 'user', content: 'Say this is a test' }],
     model: 'cohere.command-r-plus',
 });