diff --git a/apps/desktop/src/components/profileSettings/AiSettings.svelte b/apps/desktop/src/components/profileSettings/AiSettings.svelte
index 299667a9935..2863c46e29c 100644
--- a/apps/desktop/src/components/profileSettings/AiSettings.svelte
+++ b/apps/desktop/src/components/profileSettings/AiSettings.svelte
@@ -4,7 +4,7 @@
import AuthorizationBanner from "$components/AuthorizationBanner.svelte";
import SettingsSection from "$components/SettingsSection.svelte";
import { AISecretHandle, AI_SERVICE, GitAIConfigKey, KeyOption } from "$lib/ai/service";
- import { OpenAIModelName, AnthropicModelName, ModelKind } from "$lib/ai/types";
+ import { OpenAIModelName, AnthropicModelName, ModelKind, type OpenRouterModelName } from "$lib/ai/types";
import { GIT_CONFIG_SERVICE } from "$lib/config/gitConfigService";
import { SECRET_SERVICE } from "$lib/secrets/secretsService";
import { USER_SERVICE } from "$lib/user/userService";
@@ -44,6 +44,8 @@
let ollamaModel: string | undefined = $state();
let lmStudioEndpoint: string | undefined = $state();
let lmStudioModel: string | undefined = $state();
+ let openRouterKey: string | undefined = $state();
+ let openRouterModel: string | undefined = $state();
async function setConfiguration(key: GitAIConfigKey, value: string | undefined) {
if (!initialized) return;
@@ -75,6 +77,9 @@
lmStudioEndpoint = await aiService.getLMStudioEndpoint();
lmStudioModel = await aiService.getLMStudioModelName();
+ openRouterKey = await aiService.getOpenRouterKey();
+ openRouterModel = await aiService.getOpenRouterModelName();
+
// Ensure reactive declarations have finished running before we set initialized to true
await tick();
@@ -191,6 +196,12 @@
run(() => {
setConfiguration(GitAIConfigKey.LMStudioModelName, lmStudioModel);
});
+ run(() => {
+ setSecret(AISecretHandle.OpenRouterKey, openRouterKey);
+ });
+ run(() => {
+ setConfiguration(GitAIConfigKey.OpenRouterModelName, openRouterModel);
+ });
run(() => {
if (form) form.modelKind.value = modelKind;
});
@@ -204,8 +215,8 @@
{/snippet}
- GitButler supports multiple AI providers: OpenAI and Anthropic (via API or your own key), plus
- local models through Ollama and LM Studio.
+ GitButler supports multiple AI providers: OpenAI and Anthropic (via API or your own key),
+ OpenRouter for access to hundreds of models, plus local models through Ollama and LM Studio.
@@ -417,6 +428,32 @@
{/if}
+
+ {#snippet title()}
+ OpenRouter
+ {/snippet}
+ {#snippet actions()}
+
+ {/snippet}
+
+ {#if modelKind === ModelKind.OpenRouter}
+
+
+
+
+
+ {/if}
+
diff --git a/apps/desktop/src/lib/ai/openAIClient.ts b/apps/desktop/src/lib/ai/openAIClient.ts
index afad0a0c0f4..5b269549533 100644
--- a/apps/desktop/src/lib/ai/openAIClient.ts
+++ b/apps/desktop/src/lib/ai/openAIClient.ts
@@ -4,7 +4,13 @@ import {
SHORT_DEFAULT_PR_TEMPLATE,
} from "$lib/ai/prompts";
import OpenAI from "openai";
-import type { OpenAIModelName, Prompt, AIClient, AIEvalOptions } from "$lib/ai/types";
+import type {
+ OpenAIModelName,
+ OpenRouterModelName,
+ Prompt,
+ AIClient,
+ AIEvalOptions,
+} from "$lib/ai/types";
const DEFAULT_MAX_TOKENS = 1024;
@@ -15,9 +21,13 @@ export class OpenAIClient implements AIClient {
private client: OpenAI;
private openAIKey: string;
- private modelName: OpenAIModelName;
+ private modelName: OpenAIModelName | OpenRouterModelName;
- constructor(openAIKey: string, modelName: OpenAIModelName, baseURL: string | undefined) {
+ constructor(
+ openAIKey: string,
+ modelName: OpenAIModelName | OpenRouterModelName,
+ baseURL: string | undefined,
+ ) {
this.openAIKey = openAIKey;
this.modelName = modelName;
this.client = new OpenAI({ apiKey: openAIKey, dangerouslyAllowBrowser: true, baseURL });
diff --git a/apps/desktop/src/lib/ai/service.test.ts b/apps/desktop/src/lib/ai/service.test.ts
index f1ff89e6d96..caee8e0cf9b 100644
--- a/apps/desktop/src/lib/ai/service.test.ts
+++ b/apps/desktop/src/lib/ai/service.test.ts
@@ -40,6 +40,7 @@ const defaultGitConfig = Object.freeze({
const defaultSecretsConfig = Object.freeze({
[AISecretHandle.AnthropicKey]: undefined,
[AISecretHandle.OpenAIKey]: undefined,
+ [AISecretHandle.OpenRouterKey]: undefined,
});
class DummyGitConfigService extends GitConfigService {
@@ -248,6 +249,38 @@ describe("AIService", () => {
),
);
});
+
+ test("When ai provider is OpenRouter, When an API key is present. It returns OpenAIClient", async () => {
+ const gitConfig = new DummyGitConfigService({
+ ...defaultGitConfig,
+ [GitAIConfigKey.ModelProvider]: ModelKind.OpenRouter,
+ });
+ const secretsService = new DummySecretsService({
+ [AISecretHandle.OpenRouterKey]: "sk-or-test-key",
+ });
+ const tokenMemoryService = new TokenMemoryService();
+ const fetchMock = vi.fn();
+ const cloud = new HttpClient(fetchMock, "https://www.example.com", tokenMemoryService.token);
+ const aiService = new AIService(gitConfig, secretsService, cloud, tokenMemoryService);
+
+ expect(await aiService.buildClient()).toBeInstanceOf(OpenAIClient);
+ });
+
+ test("When ai provider is OpenRouter, When an API key is blank. It throws an error", async () => {
+ const gitConfig = new DummyGitConfigService({
+ ...defaultGitConfig,
+ [GitAIConfigKey.ModelProvider]: ModelKind.OpenRouter,
+ });
+ const secretsService = new DummySecretsService();
+ const tokenMemoryService = new TokenMemoryService();
+ const fetchMock = vi.fn();
+ const cloud = new HttpClient(fetchMock, "https://www.example.com", tokenMemoryService.token);
+ const aiService = new AIService(gitConfig, secretsService, cloud, tokenMemoryService);
+
+ await expect(aiService.buildClient.bind(aiService)).rejects.toThrowError(
+ new Error("When using OpenRouter, you must provide a valid API key"),
+ );
+ });
});
describe.concurrent("#summarizeCommit", async () => {
diff --git a/apps/desktop/src/lib/ai/service.ts b/apps/desktop/src/lib/ai/service.ts
index 997fea7d2b9..776d7150dab 100644
--- a/apps/desktop/src/lib/ai/service.ts
+++ b/apps/desktop/src/lib/ai/service.ts
@@ -24,6 +24,7 @@ import {
AnthropicModelName,
ModelKind,
MessageRole,
+ type OpenRouterModelName,
type Prompt,
type PromptMessage,
type FileChange,
@@ -47,6 +48,7 @@ export enum KeyOption {
export enum AISecretHandle {
OpenAIKey = "aiOpenAIKey",
AnthropicKey = "aiAnthropicKey",
+ OpenRouterKey = "aiOpenRouterKey",
}
export enum GitAIConfigKey {
@@ -61,6 +63,7 @@ export enum GitAIConfigKey {
OllamaModelName = "gitbutler.aiOllamaModelName",
LMStudioEndpoint = "gitbutler.aiLMStudioEndpoint",
LMStudioModelName = "gitbutler.aiLMStudioModelName",
+ OpenRouterModelName = "gitbutler.aiOpenRouterModelName",
}
interface BaseAIServiceOpts {
@@ -228,6 +231,17 @@ export class AIService {
);
}
+ async getOpenRouterKey() {
+ return await this.secretsService.get(AISecretHandle.OpenRouterKey);
+ }
+
+ async getOpenRouterModelName() {
+ return await this.gitConfig.getWithDefault(
+ GitAIConfigKey.OpenRouterModelName,
+ "openai/gpt-4.1-mini",
+ );
+ }
+
async usingGitButlerAPI() {
const modelKind = await this.getModelKind();
const openAIKeyOption = await this.getOpenAIKeyOption();
@@ -258,12 +272,15 @@ export class AIService {
modelKind === ModelKind.Ollama && !!ollamaEndpoint && !!ollamaModelName;
const lmStudioActiveAndEndpointProvided =
modelKind === ModelKind.LMStudio && !!lmStudioEndpoint && !!lmStudioModelName;
+ const openRouterActiveAndKeyProvided =
+ modelKind === ModelKind.OpenRouter && !!(await this.getOpenRouterKey());
return (
openAIActiveAndKeyProvided ||
anthropicActiveAndKeyProvided ||
ollamaActiveAndEndpointProvided ||
- lmStudioActiveAndEndpointProvided
+ lmStudioActiveAndEndpointProvided ||
+ openRouterActiveAndKeyProvided
);
}
@@ -334,6 +351,23 @@ export class AIService {
return new AnthropicAIClient(anthropicKey, anthropicModelName);
}
+ if (modelKind === ModelKind.OpenRouter) {
+ const openRouterKey = await this.getOpenRouterKey();
+ const openRouterModelName = await this.getOpenRouterModelName();
+
+ if (!openRouterKey) {
+ throw new Error(
+ "When using OpenRouter, you must provide a valid API key",
+ );
+ }
+
+ return new OpenAIClient(
+ openRouterKey,
+ openRouterModelName as OpenRouterModelName,
+ "https://openrouter.ai/api/v1",
+ );
+ }
+
return undefined;
}
diff --git a/apps/desktop/src/lib/ai/types.ts b/apps/desktop/src/lib/ai/types.ts
index 6873bdd3948..3f588683d2c 100644
--- a/apps/desktop/src/lib/ai/types.ts
+++ b/apps/desktop/src/lib/ai/types.ts
@@ -6,8 +6,12 @@ export enum ModelKind {
Anthropic = "anthropic",
Ollama = "ollama",
LMStudio = "lmstudio",
+ OpenRouter = "openrouter",
}
+// OpenRouter model names follow the `provider/model` format (e.g. `openai/gpt-4.1-mini`)
+export type OpenRouterModelName = `${string}/${string}`;
+
// https://platform.openai.com/docs/models
export enum OpenAIModelName {
O3mini = "o3-mini",
diff --git a/crates/but-llm/src/lib.rs b/crates/but-llm/src/lib.rs
index 7aa854881b4..0302dd86d1d 100644
--- a/crates/but-llm/src/lib.rs
+++ b/crates/but-llm/src/lib.rs
@@ -6,6 +6,7 @@ mod lmstudio;
mod ollama;
mod openai;
mod openai_utils;
+mod openrouter;
use std::sync::Arc;
@@ -24,6 +25,7 @@ pub enum LLMProviderKind {
Anthropic,
Ollama,
LMStudio,
+ OpenRouter,
}
impl LLMProviderKind {
@@ -33,6 +35,7 @@ impl LLMProviderKind {
"anthropic" => Some(LLMProviderKind::Anthropic),
"ollama" => Some(LLMProviderKind::Ollama),
"lmstudio" => Some(LLMProviderKind::LMStudio),
+ "openrouter" => Some(LLMProviderKind::OpenRouter),
_ => None,
}
}
@@ -44,6 +47,7 @@ pub enum LLMProviderConfig {
Anthropic(Option),
Ollama(Option),
LMStudio(Option),
+ OpenRouter(Option<openrouter::OpenRouterConfig>),
}
#[derive(Debug, Clone)]
@@ -52,6 +56,7 @@ pub enum LLMClientType {
Anthropic(Arc),
Ollama(Arc),
LMStudio(Arc),
+ OpenRouter(Arc<openrouter::OpenRouterProvider>),
}
#[derive(Debug, Clone)]
@@ -95,6 +100,10 @@ impl LLMProvider {
}
LLMProviderConfig::LMStudio(config) => lmstudio::LMStudioProvider::with(config, None)
.map(|p| LLMClientType::LMStudio(Arc::new(p)))?,
+ LLMProviderConfig::OpenRouter(config) => {
+ openrouter::OpenRouterProvider::with(config, None)
+ .map(|p| LLMClientType::OpenRouter(Arc::new(p)))?
+ }
};
Some(Self { client })
}
@@ -151,6 +160,12 @@ impl LLMProvider {
client: LLMClientType::LMStudio(Arc::new(client)),
})
}
+ Some(LLMProviderKind::OpenRouter) => {
+ let client = openrouter::OpenRouterProvider::from_git_config(config)?;
+ Some(Self {
+ client: LLMClientType::OpenRouter(Arc::new(client)),
+ })
+ }
None => None,
}
}
@@ -175,6 +190,7 @@ impl LLMProvider {
LLMClientType::Anthropic(client) => client.model(),
LLMClientType::Ollama(client) => client.model(),
LLMClientType::LMStudio(client) => client.model(),
+ LLMClientType::OpenRouter(client) => client.model(),
}
}
@@ -291,6 +307,13 @@ impl LLMProvider {
model,
on_token,
),
+ LLMClientType::OpenRouter(client) => client.tool_calling_loop_stream(
+ system_message,
+ chat_messages,
+ tool_set,
+ model,
+ on_token,
+ ),
}
}
@@ -338,6 +361,9 @@ impl LLMProvider {
LLMClientType::LMStudio(client) => {
client.tool_calling_loop(system_message, chat_messages, tool_set, model)
}
+ LLMClientType::OpenRouter(client) => {
+ client.tool_calling_loop(system_message, chat_messages, tool_set, model)
+ }
}
}
@@ -379,6 +405,9 @@ impl LLMProvider {
LLMClientType::LMStudio(client) => {
client.stream_response(system_message, chat_messages, model, on_token)
}
+ LLMClientType::OpenRouter(client) => {
+ client.stream_response(system_message, chat_messages, model, on_token)
+ }
}
}
@@ -428,6 +457,9 @@ impl LLMProvider {
LLMClientType::LMStudio(client) => {
client.structured_output::(system_message, chat_messages, model)
}
+ LLMClientType::OpenRouter(client) => {
+ client.structured_output::<T>(system_message, chat_messages, model)
+ }
}
}
@@ -463,6 +495,9 @@ impl LLMProvider {
LLMClientType::LMStudio(client) => {
client.response(system_message, chat_messages, model)
}
+ LLMClientType::OpenRouter(client) => {
+ client.response(system_message, chat_messages, model)
+ }
}
}
}
diff --git a/crates/but-llm/src/openrouter.rs b/crates/but-llm/src/openrouter.rs
new file mode 100644
index 00000000000..386c56b4263
--- /dev/null
+++ b/crates/but-llm/src/openrouter.rs
@@ -0,0 +1,171 @@
+use anyhow::Result;
+use async_openai::{Client, config::OpenAIConfig};
+use but_secret::{Sensitive, secret};
+use but_tools::tool::Toolset;
+use schemars::JsonSchema;
+use serde::de::DeserializeOwned;
+
+use crate::{
+ chat::ChatMessage,
+ client::LLMClient,
+ openai_utils::{
+ OpenAIClientProvider, response_blocking, stream_response_blocking,
+ structured_output_blocking, tool_calling_loop, tool_calling_loop_stream,
+ },
+};
+
+const OPENROUTER_API_BASE_DEFAULT: &str = "https://openrouter.ai/api/v1";
+const OPENROUTER_API_BASE_OPTION: &str = "gitbutler.aiOpenRouterEndpoint";
+const OPENROUTER_MODEL_NAME: &str = "gitbutler.aiOpenRouterModelName";
+
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct OpenRouterConfig {
+ pub api_base: String,
+}
+
+impl Default for OpenRouterConfig {
+ fn default() -> Self {
+ Self {
+ api_base: OPENROUTER_API_BASE_DEFAULT.to_string(),
+ }
+ }
+}
+
+impl OpenRouterConfig {
+ fn from_git_config(config: &gix::config::File<'static>) -> Self {
+ let api_base = config
+ .string(OPENROUTER_API_BASE_OPTION)
+ .map(|v| v.to_string())
+ .unwrap_or_else(|| OPENROUTER_API_BASE_DEFAULT.to_string());
+
+ Self { api_base }
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct OpenRouterProvider {
+ model: Option<String>,
+ config: OpenRouterConfig,
+ api_key: Sensitive<String>,
+}
+
+impl OpenRouterProvider {
+ pub fn with(
+ config: Option<OpenRouterConfig>,
+ model: Option<String>,
+ ) -> Option<Self> {
+ let config = config.unwrap_or_default();
+ let api_key = Self::retrieve_api_key()?;
+ Some(Self {
+ config,
+ model,
+ api_key,
+ })
+ }
+
+ fn retrieve_api_key() -> Option<Sensitive<String>> {
+ // Try secret storage first, then fall back to env var
+ if let Ok(Some(key)) = secret::retrieve("aiOpenRouterKey", secret::Namespace::Global) {
+ if !key.0.trim().is_empty() {
+ return Some(key);
+ }
+ }
+ if let Ok(val) = std::env::var("OPENROUTER_API_KEY") {
+ if !val.trim().is_empty() {
+ return Some(Sensitive(val));
+ }
+ }
+ None
+ }
+}
+
+impl OpenAIClientProvider for OpenRouterProvider {
+ fn client(&self) -> Result<Client<OpenAIConfig>> {
+ let open_ai_config = OpenAIConfig::new()
+ .with_api_base(self.config.api_base.clone())
+ .with_api_key(self.api_key.0.clone());
+
+ Ok(Client::with_config(open_ai_config))
+ }
+}
+
+impl LLMClient for OpenRouterProvider {
+ fn from_git_config(config: &gix::config::File<'static>) -> Option<Self>
+ where
+ Self: Sized,
+ {
+ let openrouter_config = OpenRouterConfig::from_git_config(config);
+ let model = config
+ .string(OPENROUTER_MODEL_NAME)
+ .map(|v| v.to_string());
+ let api_key = Self::retrieve_api_key()?;
+ Some(Self {
+ config: openrouter_config,
+ model,
+ api_key,
+ })
+ }
+
+ fn model(&self) -> Option<String> {
+ self.model.clone()
+ }
+
+ fn tool_calling_loop_stream(
+ &self,
+ system_message: &str,
+ chat_messages: Vec<ChatMessage>,
+ tool_set: &mut impl Toolset,
+ model: &str,
+ on_token: impl Fn(&str) + Send + Sync + 'static,
+ ) -> Result<(String, Vec<ChatMessage>)> {
+ let result = tool_calling_loop_stream(
+ self,
+ system_message,
+ chat_messages,
+ tool_set,
+ model,
+ on_token,
+ )?;
+ Ok((result.final_response, result.message_history))
+ }
+
+ fn tool_calling_loop(
+ &self,
+ system_message: &str,
+ chat_messages: Vec<ChatMessage>,
+ tool_set: &mut impl Toolset,
+ model: &str,
+ ) -> Result {
+ tool_calling_loop(self, system_message, chat_messages, tool_set, model)
+ }
+
+ fn stream_response(
+ &self,
+ system_message: &str,
+ chat_messages: Vec<ChatMessage>,
+ model: &str,
+ on_token: impl Fn(&str) + Send + Sync + 'static,
+ ) -> Result> {
+ stream_response_blocking(self, system_message, chat_messages, model, on_token)
+ }
+
+ fn structured_output<
+ T: serde::Serialize + DeserializeOwned + JsonSchema + std::marker::Send + 'static,
+ >(
+ &self,
+ system_message: &str,
+ chat_messages: Vec<ChatMessage>,
+ model: &str,
+ ) -> Result> {
+ structured_output_blocking::<T>(self, system_message, chat_messages, model)
+ }
+
+ fn response(
+ &self,
+ system_message: &str,
+ chat_messages: Vec<ChatMessage>,
+ model: &str,
+ ) -> Result> {
+ response_blocking(self, system_message, chat_messages, model)
+ }
+}
diff --git a/crates/gitbutler-tauri/tauri.conf.json b/crates/gitbutler-tauri/tauri.conf.json
index a54252b745d..12c527d1f84 100644
--- a/crates/gitbutler-tauri/tauri.conf.json
+++ b/crates/gitbutler-tauri/tauri.conf.json
@@ -59,7 +59,7 @@
"csp": {
"default-src": "'self'",
"img-src": "'self' asset: https://asset.localhost data: tauri://localhost https://avatars.githubusercontent.com https://*.gitbutler.com https://gitbutler-public.s3.amazonaws.com https://*.gravatar.com https://io.wp.com https://i0.wp.com https://i1.wp.com https://i2.wp.com https://i3.wp.com https://github.com https://*.googleusercontent.com https://*.giphy.com/ blob:",
- "connect-src": "'self' ipc: http://ipc.localhost https://eu.posthog.com https://eu.i.posthog.com https://eu-assets.i.posthog.com https://app.gitbutler.com https://o4504644069687296.ingest.sentry.io ws://localhost:7703 https://github.com https://api.github.com https://api.openai.com https://api.anthropic.com https://app.staging.gitbutler.com https://*.gitlab.com https://gitlab.com wss://irc.gitbutler.com:8097 data:",
+ "connect-src": "'self' ipc: http://ipc.localhost https://eu.posthog.com https://eu.i.posthog.com https://eu-assets.i.posthog.com https://app.gitbutler.com https://o4504644069687296.ingest.sentry.io ws://localhost:7703 https://github.com https://api.github.com https://api.openai.com https://api.anthropic.com https://openrouter.ai https://app.staging.gitbutler.com https://*.gitlab.com https://gitlab.com wss://irc.gitbutler.com:8097 data:",
"script-src": "'self' https://eu.posthog.com https://eu.i.posthog.com https://eu-assets.i.posthog.com",
"style-src": "'self' 'unsafe-inline'"
}