Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ On first run, a default config is created at `~/.commandok/config.toml`. Add you
```toml
[commandok]
# Options: anthropic, openai, google, mistral, ollama,
# openrouter, xai, litert_lm
# openrouter, xai, vercel_ai_gateway, litert_lm
provider = "anthropic"
system_prompt = """\
You are a terminal command generator. Given a natural language description, output ONLY \
Expand Down Expand Up @@ -68,6 +68,11 @@ api_key = ""
model = "grok-4.20-0309-reasoning"
# api_url = "https://api.x.ai/v1" # default

[vercel_ai_gateway]
api_key = ""
model = "google/gemini-3-flash"
# api_url = "https://ai-gateway.vercel.sh/v1" # default

[litert_lm]
model = "gemma-4-E2B-it.litertlm"
huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"
Expand Down
10 changes: 9 additions & 1 deletion src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ pub struct Config {
pub ollama: Option<ProviderConfig>,
pub openrouter: Option<ProviderConfig>,
pub xai: Option<ProviderConfig>,
pub vercel_ai_gateway: Option<ProviderConfig>,
pub litert_lm: Option<ProviderConfig>,
}

Expand Down Expand Up @@ -45,7 +46,7 @@ fn config_path() -> PathBuf {

const DEFAULT_CONFIG: &str = r#"[commandok]
# Options: anthropic, openai, google, mistral, ollama,
# openrouter, xai, litert_lm
# openrouter, xai, vercel_ai_gateway, litert_lm
provider = "anthropic"
system_prompt = """\
You are a terminal command generator. Given a natural language description, output ONLY \
Expand Down Expand Up @@ -84,6 +85,11 @@ api_key = ""
model = "grok-4.20-0309-reasoning"
# api_url = "https://api.x.ai/v1" # default

[vercel_ai_gateway]
api_key = ""
model = "google/gemini-3-flash"
# api_url = "https://ai-gateway.vercel.sh/v1" # default

[litert_lm]
model = "gemma-4-E2B-it.litertlm"
huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"
Expand Down Expand Up @@ -150,6 +156,7 @@ const PROVIDER_ORDER: &[&str] = &[
"ollama",
"openrouter",
"xai",
"vercel_ai_gateway",
"litert_lm",
];

Expand All @@ -163,6 +170,7 @@ impl Config {
"ollama" => self.ollama.as_ref(),
"openrouter" => self.openrouter.as_ref(),
"xai" => self.xai.as_ref(),
"vercel_ai_gateway" => self.vercel_ai_gateway.as_ref(),
"litert_lm" => self.litert_lm.as_ref(),
_ => None,
}
Expand Down
4 changes: 4 additions & 0 deletions src/provider/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ pub mod mistral;
pub mod ollama;
pub mod openai;
pub mod openrouter;
pub mod vercel_ai_gateway;
pub mod xai;

use crate::config::ProviderConfig;
Expand All @@ -25,6 +26,7 @@ pub enum Provider {
Ollama(ProviderConfig),
OpenRouter(ProviderConfig),
Xai(ProviderConfig),
VercelAiGateway(ProviderConfig),
LitertLm(ProviderConfig),
}

Expand All @@ -38,6 +40,7 @@ impl Provider {
"ollama" => Provider::Ollama(cfg.clone()),
"openrouter" => Provider::OpenRouter(cfg.clone()),
"xai" => Provider::Xai(cfg.clone()),
"vercel_ai_gateway" => Provider::VercelAiGateway(cfg.clone()),
"litert_lm" => Provider::LitertLm(cfg.clone()),
_ => unreachable!("validated in config"),
}
Expand All @@ -57,6 +60,7 @@ impl Provider {
Provider::Ollama(cfg) => ollama::stream(cfg, query, system_prompt, tx).await,
Provider::OpenRouter(cfg) => openrouter::stream(cfg, query, system_prompt, tx).await,
Provider::Xai(cfg) => xai::stream(cfg, query, system_prompt, tx).await,
Provider::VercelAiGateway(cfg) => vercel_ai_gateway::stream(cfg, query, system_prompt, tx).await,
Provider::LitertLm(cfg) => litert_lm::stream(cfg, query, system_prompt, tx).await,
}
}
Expand Down
46 changes: 46 additions & 0 deletions src/provider/vercel_ai_gateway.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
use super::ApiEvent;
use crate::config::ProviderConfig;
use tokio::sync::mpsc;

/// Default Vercel AI Gateway base URL (OpenAI-compatible `/responses` API).
const DEFAULT_BASE_URL: &str = "https://ai-gateway.vercel.sh/v1";

/// Streams a completion from the Vercel AI Gateway `/responses` endpoint.
///
/// Builds a Responses-API request from `cfg` (model, API key, optional
/// custom base URL), sending `system_prompt` as `instructions` and `query`
/// as `input`, then forwards each streamed text delta to `tx`.
///
/// Errors are reported through `tx` by the shared `send_request` /
/// `parse_sse_stream` helpers, so this function itself returns nothing.
pub async fn stream(
    cfg: &ProviderConfig,
    query: &str,
    system_prompt: &str,
    tx: mpsc::UnboundedSender<ApiEvent>,
) {
    // Fall back to the public gateway when the user left `api_url` empty;
    // trim a trailing slash so `{base_url}/responses` never doubles the `/`.
    let base_url = if cfg.api_url.is_empty() {
        DEFAULT_BASE_URL
    } else {
        cfg.api_url.trim_end_matches('/')
    };

    let client = reqwest::Client::new();
    let body = serde_json::json!({
        "model": cfg.model,
        "instructions": system_prompt,
        "input": query,
        "stream": true,
        // Generated shell commands are short; cap output so a runaway
        // response stays cheap.
        "max_output_tokens": 256,
    });

    // Note: `RequestBuilder::json` sets `Content-Type: application/json`
    // itself, so no explicit content-type header is needed here.
    let request = client
        .post(format!("{base_url}/responses"))
        .header("Authorization", format!("Bearer {}", cfg.api_key))
        .json(&body);

    let Some(resp) = super::send_request(request, &tx).await else {
        return;
    };

    // Responses-style SSE: only `response.output_text.delta` events carry
    // incremental text; all other event types (created/completed/...) are
    // ignored by returning `None`.
    super::parse_sse_stream(resp, &tx, |json| {
        if json["type"] == "response.output_text.delta" {
            json["delta"].as_str().map(String::from)
        } else {
            None
        }
    })
    .await;
}