diff --git a/README.md b/README.md
index 574fdc8..4e7949f 100644
--- a/README.md
+++ b/README.md
@@ -78,6 +78,10 @@ model = "google/gemini-3-flash"
 model = "gemma-4-E2B-it.litertlm"
 huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"
 
+[lmstudio]
+model = "qwen/qwen3-coder-next"
+# api_url = "http://localhost:1234" # default, change if running elsewhere
+
 [apple_intelligence]
 model = "system"
 ```
diff --git a/src/config.rs b/src/config.rs
index 771fd70..93b1178 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -10,6 +10,7 @@ pub struct Config {
     pub anthropic: Option<ProviderConfig>,
     pub openai: Option<ProviderConfig>,
     pub google: Option<ProviderConfig>,
+    pub lmstudio: Option<ProviderConfig>,
     pub mistral: Option<ProviderConfig>,
     pub ollama: Option<ProviderConfig>,
     pub openrouter: Option<ProviderConfig>,
@@ -101,6 +102,10 @@ model = "google/gemini-3-flash"
 model = "gemma-4-E2B-it.litertlm"
 huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"
 
+[lmstudio]
+model = "qwen/qwen3-coder-next"
+# api_url = "http://localhost:1234" # default, change if running elsewhere
+
 # On-device Apple Intelligence (FoundationModels framework).
 # Only used when commandok was built with: cargo install commandok --features apple-intelligence
 # Requires macOS 26+ on Apple Silicon and Apple Intelligence enabled in System Settings.
@@ -165,6 +170,7 @@ const PROVIDER_ORDER: &[&str] = &[
     "anthropic",
     "openai",
     "google",
+    "lmstudio",
     "mistral",
     "ollama",
     "openrouter",
@@ -185,6 +191,7 @@ impl Config {
             "anthropic" => self.anthropic.as_ref(),
             "openai" => self.openai.as_ref(),
             "google" => self.google.as_ref(),
+            "lmstudio" => self.lmstudio.as_ref(),
             "mistral" => self.mistral.as_ref(),
             "ollama" => self.ollama.as_ref(),
             "openrouter" => self.openrouter.as_ref(),
diff --git a/src/provider/lmstudio.rs b/src/provider/lmstudio.rs
new file mode 100644
index 0000000..c5a1180
--- /dev/null
+++ b/src/provider/lmstudio.rs
@@ -0,0 +1,72 @@
+use super::ApiEvent;
+use crate::config::ProviderConfig;
+use tokio::sync::mpsc;
+
+/// Query a local LM Studio server and forward the reply through `tx`.
+///
+/// Sends one request, then emits the response text as `ApiEvent::Delta`
+/// events followed by `ApiEvent::Done`; failures become `ApiEvent::Error`.
+pub async fn stream(
+    cfg: &ProviderConfig,
+    query: &str,
+    system_prompt: &str,
+    tx: mpsc::UnboundedSender<ApiEvent>,
+) {
+    let client = reqwest::Client::new();
+    // Default LM Studio port; a configured api_url overrides it.
+    let base_url = if cfg.api_url.is_empty() {
+        "http://localhost:1234"
+    } else {
+        cfg.api_url.trim_end_matches('/')
+    };
+    let url = format!("{base_url}/api/v1/chat");
+
+    let body = serde_json::json!({
+        "model": cfg.model,
+        "system_prompt": system_prompt,
+        "input": query,
+    });
+
+    let request = client
+        .post(&url)
+        .header("content-type", "application/json")
+        .json(&body);
+
+    let Some(resp) = super::send_request(request, &tx).await else {
+        return;
+    };
+
+    let json = match resp.json::<serde_json::Value>().await {
+        Ok(j) => j,
+        Err(e) => {
+            let _ = tx.send(ApiEvent::Error(e.to_string()));
+            return;
+        }
+    };
+
+    // A valid reply carries at least one output item with string content.
+    let outputs = json["output"].as_array();
+    let has_content = outputs
+        .map(|arr| arr.iter().any(|item| item["content"].as_str().is_some()))
+        .unwrap_or(false);
+
+    if !has_content {
+        // NOTE(review): assumes a top-level string "error" field — confirm
+        // against LM Studio's actual error response schema.
+        let err_msg = json["error"]
+            .as_str()
+            .unwrap_or("invalid response: output[].content missing");
+        let _ = tx.send(ApiEvent::Error(err_msg.to_string()));
+        return;
+    }
+
+    for item in outputs.unwrap() {
+        if let Some(text) = item["content"].as_str() {
+            if !text.is_empty() && tx.send(ApiEvent::Delta(text.to_string())).is_err() {
+                return;
+            }
+        }
+    }
+
+    let _ = tx.send(ApiEvent::Done);
+}
diff --git a/src/provider/mod.rs b/src/provider/mod.rs
index 45e31f6..08afc0c 100644
--- a/src/provider/mod.rs
+++ b/src/provider/mod.rs
@@ -7,6 +7,7 @@ pub mod apple_intelligence;
 pub mod claude;
 pub mod gemini;
 pub mod litert_lm;
+pub mod lmstudio;
 pub mod mistral;
 pub mod ollama;
 pub mod openai;
@@ -28,6 +29,7 @@ pub enum Provider {
     Anthropic(ProviderConfig),
     OpenAi(ProviderConfig),
     Google(ProviderConfig),
+    LMStudio(ProviderConfig),
     Mistral(ProviderConfig),
     Ollama(ProviderConfig),
     OpenRouter(ProviderConfig),
@@ -48,6 +50,7 @@ impl Provider {
             "anthropic" => Provider::Anthropic(cfg.clone()),
             "openai" => Provider::OpenAi(cfg.clone()),
             "google" => Provider::Google(cfg.clone()),
+            "lmstudio" => Provider::LMStudio(cfg.clone()),
             "mistral" => Provider::Mistral(cfg.clone()),
             "ollama" => Provider::Ollama(cfg.clone()),
             "openrouter" => Provider::OpenRouter(cfg.clone()),
@@ -74,6 +77,7 @@ impl Provider {
             Provider::Anthropic(cfg) => claude::stream(cfg, query, system_prompt, tx).await,
             Provider::OpenAi(cfg) => openai::stream(cfg, query, system_prompt, tx).await,
             Provider::Google(cfg) => gemini::stream(cfg, query, system_prompt, tx).await,
+            Provider::LMStudio(cfg) => lmstudio::stream(cfg, query, system_prompt, tx).await,
             Provider::Mistral(cfg) => mistral::stream(cfg, query, system_prompt, tx).await,
             Provider::Ollama(cfg) => ollama::stream(cfg, query, system_prompt, tx).await,
             Provider::OpenRouter(cfg) => openrouter::stream(cfg, query, system_prompt, tx).await,