diff --git a/README.md b/README.md index 4e9643b..8a7e50e 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ On first run, a default config is created at `~/.commandok/config.toml`. Add you ```toml [commandok] # Options: anthropic, openai, google, mistral, ollama, -# openrouter, xai, litert_lm +# openrouter, xai, vercel_ai_gateway, litert_lm provider = "anthropic" system_prompt = """\ You are a terminal command generator. Given a natural language description, output ONLY \ @@ -68,6 +68,11 @@ api_key = "" model = "grok-4.20-0309-reasoning" # api_url = "https://api.x.ai/v1" # default +[vercel_ai_gateway] +api_key = "" +model = "google/gemini-3-flash" +# api_url = "https://ai-gateway.vercel.sh/v1" # default + [litert_lm] model = "gemma-4-E2B-it.litertlm" huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm" diff --git a/src/config.rs b/src/config.rs index 740a3b6..9bc3162 100644 --- a/src/config.rs +++ b/src/config.rs @@ -14,6 +14,7 @@ pub struct Config { pub ollama: Option<ProviderConfig>, pub openrouter: Option<ProviderConfig>, pub xai: Option<ProviderConfig>, + pub vercel_ai_gateway: Option<ProviderConfig>, pub litert_lm: Option<ProviderConfig>, } @@ -45,7 +46,7 @@ fn config_path() -> PathBuf { const DEFAULT_CONFIG: &str = r#"[commandok] # Options: anthropic, openai, google, mistral, ollama, -# openrouter, xai, litert_lm +# openrouter, xai, vercel_ai_gateway, litert_lm provider = "anthropic" system_prompt = """\ You are a terminal command generator. 
Given a natural language description, output ONLY \ @@ -84,6 +85,11 @@ api_key = "" model = "grok-4.20-0309-reasoning" # api_url = "https://api.x.ai/v1" # default +[vercel_ai_gateway] +api_key = "" +model = "google/gemini-3-flash" +# api_url = "https://ai-gateway.vercel.sh/v1" # default + [litert_lm] model = "gemma-4-E2B-it.litertlm" huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm" @@ -150,6 +156,7 @@ const PROVIDER_ORDER: &[&str] = &[ "ollama", "openrouter", "xai", + "vercel_ai_gateway", "litert_lm", ]; @@ -163,6 +170,7 @@ impl Config { "ollama" => self.ollama.as_ref(), "openrouter" => self.openrouter.as_ref(), "xai" => self.xai.as_ref(), + "vercel_ai_gateway" => self.vercel_ai_gateway.as_ref(), "litert_lm" => self.litert_lm.as_ref(), _ => None, } diff --git a/src/provider/mod.rs b/src/provider/mod.rs index 577ef9d..9a8be0c 100644 --- a/src/provider/mod.rs +++ b/src/provider/mod.rs @@ -5,6 +5,7 @@ pub mod mistral; pub mod ollama; pub mod openai; pub mod openrouter; +pub mod vercel_ai_gateway; pub mod xai; use crate::config::ProviderConfig; @@ -25,6 +26,7 @@ pub enum Provider { Ollama(ProviderConfig), OpenRouter(ProviderConfig), Xai(ProviderConfig), + VercelAiGateway(ProviderConfig), LitertLm(ProviderConfig), } @@ -38,6 +40,7 @@ impl Provider { "ollama" => Provider::Ollama(cfg.clone()), "openrouter" => Provider::OpenRouter(cfg.clone()), "xai" => Provider::Xai(cfg.clone()), + "vercel_ai_gateway" => Provider::VercelAiGateway(cfg.clone()), "litert_lm" => Provider::LitertLm(cfg.clone()), _ => unreachable!("validated in config"), } @@ -57,6 +60,7 @@ impl Provider { Provider::Ollama(cfg) => ollama::stream(cfg, query, system_prompt, tx).await, Provider::OpenRouter(cfg) => openrouter::stream(cfg, query, system_prompt, tx).await, Provider::Xai(cfg) => xai::stream(cfg, query, system_prompt, tx).await, + Provider::VercelAiGateway(cfg) => vercel_ai_gateway::stream(cfg, query, system_prompt, tx).await, Provider::LitertLm(cfg) => litert_lm::stream(cfg, query, 
system_prompt, tx).await, } } diff --git a/src/provider/vercel_ai_gateway.rs b/src/provider/vercel_ai_gateway.rs new file mode 100644 index 0000000..665a784 --- /dev/null +++ b/src/provider/vercel_ai_gateway.rs @@ -0,0 +1,46 @@ +use super::ApiEvent; +use crate::config::ProviderConfig; +use tokio::sync::mpsc; + +const DEFAULT_BASE_URL: &str = "https://ai-gateway.vercel.sh/v1"; + +pub async fn stream( + cfg: &ProviderConfig, + query: &str, + system_prompt: &str, + tx: mpsc::UnboundedSender<ApiEvent>, +) { + let base_url = if cfg.api_url.is_empty() { + DEFAULT_BASE_URL + } else { + cfg.api_url.trim_end_matches('/') + }; + + let client = reqwest::Client::new(); + let body = serde_json::json!({ + "model": cfg.model, + "instructions": system_prompt, + "input": query, + "stream": true, + "max_output_tokens": 256, + }); + + let request = client + .post(format!("{base_url}/responses")) + .header("Authorization", format!("Bearer {}", cfg.api_key)) + .header("content-type", "application/json") + .json(&body); + + let Some(resp) = super::send_request(request, &tx).await else { + return; + }; + + super::parse_sse_stream(resp, &tx, |json| { + if json["type"] == "response.output_text.delta" { + json["delta"].as_str().map(String::from) + } else { + None + } + }) + .await; +}