6 changes: 6 additions & 0 deletions Cargo.toml
@@ -8,6 +8,12 @@ readme = "README.md"
homepage = "https://github.com/64bit/commandOK"
repository = "https://github.com/64bit/commandOK"

[features]
default = []
# On-device Apple Intelligence via the FoundationModels framework.
# Requires macOS 26+ on Apple Silicon and the Swift toolchain (xcrun/swiftc) at build time.
apple-intelligence = []

[dependencies]
ratatui = "0.30"
unicode-width = "0.2.2"
16 changes: 15 additions & 1 deletion README.md
@@ -29,7 +29,8 @@ On first run, a default config is created at `~/.commandok/config.toml`. Add you
```toml
[commandok]
# Options: anthropic, openai, google, mistral, ollama,
# openrouter, xai, vercel_ai_gateway, litert_lm
# openrouter, xai, vercel_ai_gateway, litert_lm,
# apple_intelligence (requires building with --features apple-intelligence on macOS 26+ ARM)
provider = "anthropic"
system_prompt = """\
You are a terminal command generator. Given a natural language description, output ONLY \
@@ -76,8 +77,21 @@ model = "google/gemini-3-flash"
[litert_lm]
model = "gemma-4-E2B-it.litertlm"
huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"

[apple_intelligence]
model = "system"
```

## Apple Intelligence (optional)

On Apple Silicon Macs running macOS 26 or later, commandOK can run prompts entirely on-device through Apple's FoundationModels framework. The integration is gated behind a Cargo feature, so the default install does not require the Swift toolchain.

```bash
cargo install commandok --features apple-intelligence
```

Building with the feature enabled requires the Xcode Command Line Tools (`xcode-select --install`). At runtime, Apple Intelligence must be enabled in System Settings.
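Once built with the feature, select it like any other provider by pointing the `provider` key at the `[apple_intelligence]` section in `~/.commandok/config.toml`:

```toml
[commandok]
provider = "apple_intelligence"

[apple_intelligence]
model = "system"
```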

## Usage

Run `commandok` in any terminal. A search bar appears inline below your cursor.
73 changes: 73 additions & 0 deletions build.rs
@@ -0,0 +1,73 @@
use std::env;
use std::path::PathBuf;
use std::process::Command;

fn main() {
println!("cargo:rerun-if-changed=build.rs");
println!("cargo:rerun-if-changed=src/provider/apple_intelligence_bridge.swift");

if env::var_os("CARGO_FEATURE_APPLE_INTELLIGENCE").is_none() {
return;
}

let target_os = env::var("CARGO_CFG_TARGET_OS").unwrap_or_default();
let target_arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap_or_default();
if target_os != "macos" || target_arch != "aarch64" {
panic!(
"the `apple-intelligence` feature requires the aarch64-apple-darwin target \
(got target_os={target_os}, target_arch={target_arch})",
);
}

// Preflight check: confirm xcrun can locate the macOS SDK. If xcrun is missing, the user needs the Xcode Command Line Tools.
let sdk_path_out = Command::new("xcrun")
.args(["-sdk", "macosx", "--show-sdk-path"])
.output()
.expect(
"failed to invoke `xcrun`; install the Xcode Command Line Tools \
(`xcode-select --install`) to build the apple-intelligence feature",
);
if !sdk_path_out.status.success() {
panic!(
"`xcrun -sdk macosx --show-sdk-path` failed: {}",
String::from_utf8_lossy(&sdk_path_out.stderr)
);
}

let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
let lib_path = out_dir.join("libapple_intelligence_bridge.a");
let src_path = "src/provider/apple_intelligence_bridge.swift";

let status = Command::new("xcrun")
.args([
"-sdk",
"macosx",
"swiftc",
"-emit-library",
"-static",
"-parse-as-library",
"-O",
"-target",
"arm64-apple-macos26.0",
"-module-name",
"AppleIntelligenceBridge",
"-o",
])
.arg(&lib_path)
.arg(src_path)
.status()
.expect("failed to invoke `xcrun swiftc`");

if !status.success() {
panic!("swiftc failed to build the Apple Intelligence bridge");
}

println!("cargo:rustc-link-search=native={}", out_dir.display());
println!("cargo:rustc-link-lib=static=apple_intelligence_bridge");
println!("cargo:rustc-link-lib=framework=FoundationModels");
println!("cargo:rustc-link-lib=framework=Foundation");

// Pull in the system Swift runtime (ABI-stable since macOS 10.14.4).
println!("cargo:rustc-link-search=/usr/lib/swift");
println!("cargo:rustc-link-arg=-Wl,-rpath,/usr/lib/swift");
}
27 changes: 26 additions & 1 deletion src/config.rs
@@ -16,6 +16,12 @@ pub struct Config {
pub xai: Option<ProviderConfig>,
pub vercel_ai_gateway: Option<ProviderConfig>,
pub litert_lm: Option<ProviderConfig>,
#[cfg(all(
feature = "apple-intelligence",
target_os = "macos",
target_arch = "aarch64"
))]
pub apple_intelligence: Option<ProviderConfig>,
}

#[derive(Deserialize)]
@@ -46,7 +52,8 @@ fn config_path() -> PathBuf {

const DEFAULT_CONFIG: &str = r#"[commandok]
# Options: anthropic, openai, google, mistral, ollama,
# openrouter, xai, vercel_ai_gateway, litert_lm
# openrouter, xai, vercel_ai_gateway, litert_lm,
# apple_intelligence (requires building with --features apple-intelligence on macOS 26+ ARM)
provider = "anthropic"
system_prompt = """\
You are a terminal command generator. Given a natural language description, output ONLY \
@@ -93,6 +100,12 @@ model = "google/gemini-3-flash"
[litert_lm]
model = "gemma-4-E2B-it.litertlm"
huggingface_repo = "litert-community/gemma-4-E2B-it-litert-lm"

# On-device Apple Intelligence (FoundationModels framework).
# Only used when commandok was built with: cargo install commandok --features apple-intelligence
# Requires macOS 26+ on Apple Silicon and Apple Intelligence enabled in System Settings.
[apple_intelligence]
model = "system"
"#;

pub fn load() -> Result<Config, String> {
@@ -158,6 +171,12 @@ const PROVIDER_ORDER: &[&str] = &[
"xai",
"vercel_ai_gateway",
"litert_lm",
#[cfg(all(
feature = "apple-intelligence",
target_os = "macos",
target_arch = "aarch64"
))]
"apple_intelligence",
];

impl Config {
@@ -172,6 +191,12 @@ impl Config {
"xai" => self.xai.as_ref(),
"vercel_ai_gateway" => self.vercel_ai_gateway.as_ref(),
"litert_lm" => self.litert_lm.as_ref(),
#[cfg(all(
feature = "apple-intelligence",
target_os = "macos",
target_arch = "aarch64"
))]
"apple_intelligence" => self.apple_intelligence.as_ref(),
_ => None,
}
}
109 changes: 109 additions & 0 deletions src/provider/apple_intelligence.rs
@@ -0,0 +1,109 @@
//! On-device LLM provider backed by Apple's FoundationModels framework.
//!
//! This module is gated to macOS 26+ on Apple Silicon and only compiles when
//! the `apple-intelligence` Cargo feature is enabled. The actual streaming
//! happens in a Swift bridge (see `apple_intelligence_bridge.swift`); this
//! file is the thin Rust wrapper around its C ABI.

use super::ApiEvent;
use crate::config::ProviderConfig;
use std::ffi::{CStr, CString, c_char, c_void};
use tokio::sync::mpsc;

unsafe extern "C" {
fn apple_intelligence_available() -> i32;
fn apple_intelligence_unavailable_reason(buf: *mut c_char, len: usize) -> usize;
fn apple_intelligence_stream(
instructions: *const c_char,
prompt: *const c_char,
user_data: *mut c_void,
on_delta: extern "C" fn(*mut c_void, *const c_char),
on_done: extern "C" fn(*mut c_void, i32, *const c_char),
);
}

extern "C" fn on_delta(user_data: *mut c_void, text: *const c_char) {
if user_data.is_null() || text.is_null() {
return;
}
let tx = unsafe { &*(user_data as *const mpsc::UnboundedSender<ApiEvent>) };
let s = unsafe { CStr::from_ptr(text) }.to_string_lossy().into_owned();
let _ = tx.send(ApiEvent::Delta(s));
}

extern "C" fn on_done(user_data: *mut c_void, status: i32, err: *const c_char) {
if user_data.is_null() {
return;
}
let tx = unsafe { &*(user_data as *const mpsc::UnboundedSender<ApiEvent>) };
if status == 0 {
let _ = tx.send(ApiEvent::Done);
} else {
let msg = if err.is_null() {
"Apple Intelligence stream failed".to_string()
} else {
unsafe { CStr::from_ptr(err) }.to_string_lossy().into_owned()
};
let _ = tx.send(ApiEvent::Error(msg));
}
}

pub async fn stream(
_cfg: &ProviderConfig,
query: &str,
system_prompt: &str,
tx: mpsc::UnboundedSender<ApiEvent>,
) {
// Fast-path availability check so we surface a clean error instead of
// hanging in the Swift Task when Apple Intelligence is off.
if unsafe { apple_intelligence_available() } != 0 {
let mut buf = [0u8; 256];
let n = unsafe {
apple_intelligence_unavailable_reason(buf.as_mut_ptr() as *mut c_char, buf.len())
};
let reason = std::str::from_utf8(&buf[..n])
.unwrap_or("Apple Intelligence is unavailable")
.to_string();
let _ = tx.send(ApiEvent::Error(reason));
return;
}

let instructions = match CString::new(system_prompt) {
Ok(s) => s,
Err(_) => {
let _ = tx.send(ApiEvent::Error(
"system_prompt contains a NUL byte".to_string(),
));
return;
}
};
let prompt = match CString::new(query) {
Ok(s) => s,
Err(_) => {
let _ = tx.send(ApiEvent::Error("query contains a NUL byte".to_string()));
return;
}
};

// The Swift bridge blocks the calling thread until the async Task
// finishes, so move it onto a blocking pool to keep the tokio runtime
// responsive.
let _ = tokio::task::spawn_blocking(move || {
let boxed: Box<mpsc::UnboundedSender<ApiEvent>> = Box::new(tx);
let user_data = Box::into_raw(boxed) as *mut c_void;
unsafe {
apple_intelligence_stream(
instructions.as_ptr(),
prompt.as_ptr(),
user_data,
on_delta,
on_done,
);
// Reclaim the sender so it's dropped exactly once.
drop(Box::from_raw(
user_data as *mut mpsc::UnboundedSender<ApiEvent>,
));
}
})
.await;
}
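The provider dispatcher itself is not shown in this part of the diff. As a rough sketch of how the new module could be wired in under the same `cfg` gate (the module declaration, the `dispatch` function name, and the fallback arm below are assumptions for illustration, not code from the PR):

```rust
// Hypothetical wiring in src/provider/mod.rs; only the cfg gate and the
// apple_intelligence::stream signature are taken from the diff above.
#[cfg(all(
    feature = "apple-intelligence",
    target_os = "macos",
    target_arch = "aarch64"
))]
mod apple_intelligence;

pub async fn dispatch(
    provider: &str,
    cfg: &crate::config::ProviderConfig,
    query: &str,
    system_prompt: &str,
    tx: tokio::sync::mpsc::UnboundedSender<ApiEvent>,
) {
    match provider {
        // ...existing providers elided...
        #[cfg(all(
            feature = "apple-intelligence",
            target_os = "macos",
            target_arch = "aarch64"
        ))]
        "apple_intelligence" => apple_intelligence::stream(cfg, query, system_prompt, tx).await,
        other => {
            let _ = tx.send(ApiEvent::Error(format!("unknown provider: {other}")));
        }
    }
}
```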