Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,10 @@ model = "google/gemini-3-flash"
model = "gemma-4-E2B-it.litertlm"
huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"

[lmstudio]
model = "qwen/qwen3-coder-next"
# api_url = "http://localhost:1234" # default, change if running elsewhere

[apple_intelligence]
model = "system"
```
Expand Down
7 changes: 7 additions & 0 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ pub struct Config {
pub anthropic: Option<ProviderConfig>,
pub openai: Option<ProviderConfig>,
pub google: Option<ProviderConfig>,
pub lmstudio: Option<ProviderConfig>,
pub mistral: Option<ProviderConfig>,
pub ollama: Option<ProviderConfig>,
pub openrouter: Option<ProviderConfig>,
Expand Down Expand Up @@ -101,6 +102,10 @@ model = "google/gemini-3-flash"
model = "gemma-4-E2B-it.litertlm"
huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"

[lmstudio]
model = "qwen/qwen3-coder-next"
# api_url = "http://localhost:1234" # default, change if running elsewhere

# On-device Apple Intelligence (FoundationModels framework).
# Only used when commandok was built with: cargo install commandok --features apple-intelligence
# Requires macOS 26+ on Apple Silicon and Apple Intelligence enabled in System Settings.
Expand Down Expand Up @@ -165,6 +170,7 @@ const PROVIDER_ORDER: &[&str] = &[
"anthropic",
"openai",
"google",
"lmstudio",
"mistral",
"ollama",
"openrouter",
Expand All @@ -185,6 +191,7 @@ impl Config {
"anthropic" => self.anthropic.as_ref(),
"openai" => self.openai.as_ref(),
"google" => self.google.as_ref(),
"lmstudio" => self.lmstudio.as_ref(),
"mistral" => self.mistral.as_ref(),
"ollama" => self.ollama.as_ref(),
"openrouter" => self.openrouter.as_ref(),
Expand Down
64 changes: 64 additions & 0 deletions src/provider/lmstudio.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
use super::ApiEvent;
use crate::config::ProviderConfig;
use tokio::sync::mpsc;

/// Sends one chat request to a local LM Studio server and forwards the
/// response text to `tx` as `ApiEvent`s.
///
/// The `/api/v1/chat` response is read as a single JSON body here (no
/// incremental streaming): each `output[].content` string is emitted as an
/// `ApiEvent::Delta`, followed by `ApiEvent::Done`. Transport errors,
/// JSON-decode failures, and responses missing `output[].content` are
/// reported as a single `ApiEvent::Error`.
pub async fn stream(
    cfg: &ProviderConfig,
    query: &str,
    system_prompt: &str,
    tx: mpsc::UnboundedSender<ApiEvent>,
) {
    let client = reqwest::Client::new();
    // Default to LM Studio's standard local port when api_url is unset;
    // strip a trailing slash so the joined URL never contains "//".
    let base_url = if cfg.api_url.is_empty() {
        "http://localhost:1234"
    } else {
        cfg.api_url.trim_end_matches('/')
    };
    let url = format!("{base_url}/api/v1/chat");

    let body = serde_json::json!({
        "model": cfg.model,
        "system_prompt": system_prompt,
        "input": query,
    });

    let request = client
        .post(&url)
        .header("content-type", "application/json")
        .json(&body);

    // send_request reports transport/HTTP errors on tx itself.
    let Some(resp) = super::send_request(request, &tx).await else {
        return;
    };

    let json = match resp.json::<serde_json::Value>().await {
        Ok(j) => j,
        Err(e) => {
            let _ = tx.send(ApiEvent::Error(e.to_string()));
            return;
        }
    };

    // A usable response carries text in output[].content. Keep the array
    // only if at least one item has it; otherwise fall through to the error
    // path below. Binding the array here (instead of a separate boolean
    // check followed by unwrap()) makes the success path hold the data by
    // construction.
    let Some(items) = json["output"]
        .as_array()
        .filter(|arr| arr.iter().any(|item| item["content"].as_str().is_some()))
    else {
        // The server may explain the failure in a top-level "error" field.
        let err_msg = json["error"]
            .as_str()
            .unwrap_or("invalid response: output[].content missing");
        let _ = tx.send(ApiEvent::Error(err_msg.to_string()));
        return;
    };

    for item in items {
        if let Some(text) = item["content"].as_str() {
            // A send error means the receiver is gone — stop early.
            if !text.is_empty() && tx.send(ApiEvent::Delta(text.to_string())).is_err() {
                return;
            }
        }
    }

    let _ = tx.send(ApiEvent::Done);
}
4 changes: 4 additions & 0 deletions src/provider/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ pub mod apple_intelligence;
pub mod claude;
pub mod gemini;
pub mod litert_lm;
pub mod lmstudio;
pub mod mistral;
pub mod ollama;
pub mod openai;
Expand All @@ -28,6 +29,7 @@ pub enum Provider {
Anthropic(ProviderConfig),
OpenAi(ProviderConfig),
Google(ProviderConfig),
LMStudio(ProviderConfig),
Mistral(ProviderConfig),
Ollama(ProviderConfig),
OpenRouter(ProviderConfig),
Expand All @@ -48,6 +50,7 @@ impl Provider {
"anthropic" => Provider::Anthropic(cfg.clone()),
"openai" => Provider::OpenAi(cfg.clone()),
"google" => Provider::Google(cfg.clone()),
"lmstudio" => Provider::LMStudio(cfg.clone()),
"mistral" => Provider::Mistral(cfg.clone()),
"ollama" => Provider::Ollama(cfg.clone()),
"openrouter" => Provider::OpenRouter(cfg.clone()),
Expand All @@ -74,6 +77,7 @@ impl Provider {
Provider::Anthropic(cfg) => claude::stream(cfg, query, system_prompt, tx).await,
Provider::OpenAi(cfg) => openai::stream(cfg, query, system_prompt, tx).await,
Provider::Google(cfg) => gemini::stream(cfg, query, system_prompt, tx).await,
Provider::LMStudio(cfg) => lmstudio::stream(cfg, query, system_prompt, tx).await,
Provider::Mistral(cfg) => mistral::stream(cfg, query, system_prompt, tx).await,
Provider::Ollama(cfg) => ollama::stream(cfg, query, system_prompt, tx).await,
Provider::OpenRouter(cfg) => openrouter::stream(cfg, query, system_prompt, tx).await,
Expand Down