diff --git a/crates/agentic-core/src/lib.rs b/crates/agentic-core/src/lib.rs index 5220eb2..6daed70 100644 --- a/crates/agentic-core/src/lib.rs +++ b/crates/agentic-core/src/lib.rs @@ -11,6 +11,7 @@ //! - `theme`: UI theming system pub mod models; +pub mod orchestrator; pub mod settings; pub mod theme; diff --git a/crates/agentic-core/src/models.rs b/crates/agentic-core/src/models.rs index 1f2daaf..9506c87 100644 --- a/crates/agentic-core/src/models.rs +++ b/crates/agentic-core/src/models.rs @@ -279,6 +279,49 @@ impl ModelValidator { } } +#[derive(Serialize)] +struct LocalGenerationRequest<'a> { + model: &'a str, + prompt: &'a str, + stream: bool, +} + +#[derive(Deserialize)] +struct LocalGenerationResponse { + response: String, +} + +pub async fn call_local_model( + endpoint: &str, + model: &str, + prompt: &str, +) -> Result<String, anyhow::Error> { + let client = Client::new(); + let url = if endpoint.starts_with("http") { + format!("{}/api/generate", endpoint) + } else { + format!("http://{}/api/generate", endpoint) + }; + + let payload = LocalGenerationRequest { + model, + prompt, + stream: false, + }; + + let response = client.post(&url).json(&payload).send().await?; + + if response.status().is_success() { + let gen_response: LocalGenerationResponse = response.json().await?; + Ok(gen_response.response) + } else { + Err(anyhow::anyhow!( + "Failed to get response from local model. Status: {}", + response.status() + )) + } +} + impl Default for ModelValidator { fn default() -> Self { Self::new() diff --git a/crates/agentic-core/src/orchestrator.rs b/crates/agentic-core/src/orchestrator.rs new file mode 100644 index 0000000..6b17427 --- /dev/null +++ b/crates/agentic-core/src/orchestrator.rs @@ -0,0 +1,113 @@ +use crate::models::call_local_model; +use serde::Deserialize; + +const ORCHESTRATOR_PROMPT: &str = r#"You are Ruixen, an inquisitive AI partner. Your job is to analyze the user's request and deconstruct it into three distinct lines of inquiry. 
+ +**Your Persona and Tone:** +- Your tone should be that of a collaborative partner. +- Each proposal should have a context statement followed by a curious question. +- Use phrases like "I wonder..." or "I'm wondering if..." for questions. + +**The Query to Explore:** +"{query}" + +**Output Format:** +Generate exactly 3 proposals. Each proposal should be 2 sentences: a context statement followed by a curious question. Use a dash to separate them like this pattern: + +"Context statement here - I wonder about this question?" + +Your response must be valid JSON: +{ + "proposals": [ + "First context statement - I wonder about this?", + "Second context statement - I'm wondering if that?", + "Third context statement - I wonder about something else?" + ] +} +"#; + +const REVISE_PROMPT: &str = r#"You are an expert prompt engineer. A user wants to revise a prompt proposal. + +Original Proposal: "{proposal}" +User's Revision: "{revision}" + +Your task is to integrate the user's revision into the original proposal to create a new, single, improved prompt. +The new prompt should be self-contained and ready to use. + +Format your response as a JSON object with a single key "proposal" which is a string. +Example: +{ + "proposal": "This is the new, revised prompt." 
+} +"#; + +#[derive(Deserialize, Debug)] +struct ProposalsResponse { + proposals: Vec<String>, +} + +#[derive(Deserialize, Debug)] +struct ReviseResponse { + proposal: String, +} + +pub async fn generate_proposals( + query: &str, + endpoint: &str, + model: &str, +) -> Result<Vec<String>, anyhow::Error> { + let prompt = ORCHESTRATOR_PROMPT.replace("{query}", query); + + // Debug: Write the prompt to a file so we can see what's being sent + std::fs::write("/tmp/debug_prompt.txt", &prompt).ok(); + + let response_str = call_local_model(endpoint, model, &prompt).await?; + + // Debug: Write the response to a file so we can see what came back + std::fs::write("/tmp/debug_response.txt", &response_str).ok(); + + // Attempt to find the start of the JSON object + if let Some(json_start) = response_str.find("{") { + let json_str = &response_str[json_start..]; + match serde_json::from_str::<ProposalsResponse>(json_str) { + Ok(response) => Ok(response.proposals), + Err(e) => { + // Debug: Write the JSON we tried to parse + std::fs::write("/tmp/debug_json.txt", json_str).ok(); + Err(anyhow::anyhow!( + "Failed to parse proposals JSON: {} | JSON: {}", + e, + json_str + )) + } + } + } else { + Err(anyhow::anyhow!( + "No JSON object found in model response: {}", + response_str + )) + } +} + +pub async fn revise_proposal( + proposal: &str, + revision: &str, + endpoint: &str, + model: &str, +) -> Result<String, anyhow::Error> { + let prompt = REVISE_PROMPT + .replace("{proposal}", proposal) + .replace("{revision}", revision); + let response_str = call_local_model(endpoint, model, &prompt).await?; + + // Attempt to find the start of the JSON object + if let Some(json_start) = response_str.find("{") { + let json_str = &response_str[json_start..]; + match serde_json::from_str::<ReviseResponse>(json_str) { + Ok(response) => Ok(response.proposal), + Err(e) => Err(anyhow::anyhow!("Failed to parse revision JSON: {}", e)), + } + } else { + Err(anyhow::anyhow!("No JSON object found in model response")) + } +} diff --git a/crates/agentic-tui/src/ui/app.rs 
 b/crates/agentic-tui/src/ui/app.rs index 31e5939..a3ce22d 100644 --- a/crates/agentic-tui/src/ui/app.rs +++ b/crates/agentic-tui/src/ui/app.rs @@ -7,6 +7,7 @@ use super::{ }; use agentic_core::{ models::{ModelValidator, OllamaModel, OpenRouterModel}, + orchestrator, settings::{Settings, ValidationError}, theme::{Element, Theme}, }; @@ -14,7 +15,7 @@ use anyhow::Result; use crossterm::event::{self, Event, KeyCode, KeyEventKind, KeyModifiers}; use ratatui::{ prelude::{Constraint, CrosstermBackend, Direction, Layout, Rect, Terminal}, - widgets::{Block, Borders, Clear}, + widgets::{Block, Borders, Clear, Paragraph, Wrap}, }; use std::io::Stdout; use std::time::Duration; @@ -29,6 +30,9 @@ pub enum AppMode { EditingApiKey, SelectingLocalModel, SelectingCloudModel, + Orchestrating, + Revising, + Complete, // TODO: Add About mode } @@ -43,6 +47,7 @@ pub enum AgentStatus { ValidatingCloud, LocalEndpointError, CloudEndpointError, + Orchestrating, } #[derive(Debug)] @@ -53,6 +58,12 @@ pub enum ValidationMessage { CloudModelsLoaded(Result<Vec<OpenRouterModel>, anyhow::Error>), } +#[derive(Debug)] +pub enum AgentMessage { + ProposalsGenerated(Result<Vec<String>, anyhow::Error>), + RevisedProposalGenerated(Result<String, anyhow::Error>), +} + #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)] pub enum SettingsSelection { #[default] @@ -96,17 +107,23 @@ pub struct App { agent_status: AgentStatus, settings_selection: SettingsSelection, validation_rx: Option<mpsc::UnboundedReceiver<ValidationMessage>>, + agent_rx: mpsc::UnboundedReceiver<AgentMessage>, + agent_tx: mpsc::UnboundedSender<AgentMessage>, edit_buffer: String, available_local_models: Vec<OllamaModel>, available_cloud_models: Vec<OpenRouterModel>, selected_model_index: usize, current_page: usize, models_per_page: usize, + proposals: Vec<String>, + current_proposal_index: usize, + final_prompt: String, } impl App { pub fn new(settings: Settings) -> Self { let theme = Theme::new(settings.theme); + let (agent_tx, agent_rx) = mpsc::unbounded_channel(); Self { should_quit: false, theme, @@ -115,13 +132,110 @@ impl App { agent_status: AgentStatus::NotReady, settings_selection: 
SettingsSelection::default(), validation_rx: None, + agent_rx, + agent_tx, edit_buffer: String::new(), available_local_models: Vec::new(), available_cloud_models: Vec::new(), selected_model_index: 0, current_page: 0, models_per_page: 10, // Show 10 models per page + proposals: Vec::new(), + current_proposal_index: 0, + final_prompt: String::new(), + } + } + + fn render_synthesize_modal(&self, frame: &mut ratatui::Frame, area: Rect) { + use ratatui::{ + prelude::Alignment, + text::{Line, Span}, + widgets::Paragraph, + }; + + let block = Block::default() + .title(" Synthesize Knowledge ") + .borders(Borders::ALL) + .style(self.theme.ratatui_style(Element::Active)); + + let inner_area = block.inner(area); + frame.render_widget(block, area); + + if self.proposals.is_empty() { + let loading = Paragraph::new("Generating proposals...") + .alignment(Alignment::Center) + .style(self.theme.ratatui_style(Element::Info)); + frame.render_widget(loading, inner_area); + return; } + + // Header text + let header = + Paragraph::new("Ruixen has a few lines of inquiry. Select the best one to pursue:") + .alignment(Alignment::Left) + .style(self.theme.ratatui_style(Element::Text)) + .wrap(Wrap { trim: true }); + + // Split area: header + proposals + footer + let chunks = Layout::default() + .direction(Direction::Vertical) + .constraints([ + Constraint::Length(3), // Header + Constraint::Min(6), // Proposals (flexible) + Constraint::Length(3), // Footer + ]) + .split(inner_area); + + frame.render_widget(header, chunks[0]); + + // Render proposals + let proposal_lines: Vec<Line> = self + .proposals + .iter() + .enumerate() + .flat_map(|(i, proposal)| { + let is_selected = i == self.current_proposal_index; + let prefix = if is_selected { "> " } else { " " }; + let number = format!("{}. ", i + 1); + + // Split proposal into sentences (max 2) and wrap + let sentences: Vec<&str> = proposal.split(". 
").take(2).collect(); + + let proposal_text = if sentences.len() > 1 { + format!("{} {}", sentences[0], sentences.get(1).unwrap_or(&"")) + } else { + proposal.clone() + }; + + let style = if is_selected { + self.theme.ratatui_style(Element::Accent) + } else { + self.theme.ratatui_style(Element::Text) + }; + + vec![ + Line::from(vec![ + Span::styled(format!("{}{}", prefix, number), style), + Span::styled(proposal_text, style), + ]), + Line::from(""), // Empty line between proposals + ] + }) + .collect(); + + let proposals_paragraph = Paragraph::new(proposal_lines) + .style(self.theme.ratatui_style(Element::Text)) + .wrap(Wrap { trim: true }); + + frame.render_widget(proposals_paragraph, chunks[1]); + + // Footer with controls + let footer_text = "[Enter] Synthesize | [E]dit Selected | [ESC] Cancel"; + let footer = Paragraph::new(footer_text) + .alignment(Alignment::Center) + .style(self.theme.ratatui_style(Element::Inactive)); + + frame.render_widget(footer, chunks[2]); } pub async fn run(&mut self, terminal: &mut Terminal<CrosstermBackend<Stdout>>) -> Result<()> { @@ -139,6 +253,15 @@ impl App { self.handle_validation_message(msg); } + // Handle agent messages from background tasks + let mut agent_messages = Vec::new(); + while let Ok(msg) = self.agent_rx.try_recv() { + agent_messages.push(msg); + } + for msg in agent_messages { + self.handle_agent_message(msg); + } + // Handle keyboard events (non-blocking with timeout) if event::poll(Duration::from_millis(100))? 
{ self.handle_events()?; @@ -251,6 +374,29 @@ impl App { &self.edit_buffer, ); } + } else if self.mode == AppMode::Orchestrating { + // Render the Synthesize Knowledge modal + let size = frame.size(); + let modal_width = (((size.width as f32) * 0.8).round() as u16) + .clamp(50, 80) + .min(size.width); + let modal_height = (((size.height as f32) * 0.6).round() as u16) + .clamp(15, 25) + .min(size.height); + let modal_area = Rect::new( + (size.width.saturating_sub(modal_width)) / 2, + (size.height.saturating_sub(modal_height)) / 2, + modal_width, + modal_height, + ); + frame.render_widget(Clear, modal_area); + self.render_synthesize_modal(frame, modal_area); + } else if self.mode == AppMode::Complete { + let block = Block::default().title("Final Prompt").borders(Borders::ALL); + let paragraph = Paragraph::new(self.final_prompt.as_str()) + .block(block) + .wrap(Wrap { trim: true }); + frame.render_widget(paragraph, app_chunks[1]); } else { render_chat( frame, @@ -361,6 +507,29 @@ impl App { } } + fn handle_agent_message(&mut self, message: AgentMessage) { + match message { + AgentMessage::ProposalsGenerated(Ok(proposals)) => { + self.proposals = proposals; + self.current_proposal_index = 0; + self.mode = AppMode::Orchestrating; + self.agent_status = AgentStatus::Ready; + } + AgentMessage::ProposalsGenerated(Err(_e)) => { + self.agent_status = AgentStatus::Ready; + } + AgentMessage::RevisedProposalGenerated(Ok(proposal)) => { + self.proposals[self.current_proposal_index] = proposal; + self.mode = AppMode::Orchestrating; + self.agent_status = AgentStatus::Ready; + } + AgentMessage::RevisedProposalGenerated(Err(_e)) => { + // TODO: Set error state and display to user + self.agent_status = AgentStatus::Ready; + } + } + } + fn check_both_validations_complete(&mut self) { // If we're still in ValidatingCloud state and receive a successful cloud validation, // it means both local and cloud are good @@ -587,6 +756,64 @@ impl App { } _ => {} }, + AppMode::Orchestrating => 
match key.code { + KeyCode::Up => { + if self.current_proposal_index > 0 { + self.current_proposal_index -= 1; + } + } + KeyCode::Down => { + if self.current_proposal_index + 1 < self.proposals.len() { + self.current_proposal_index += 1; + } + } + KeyCode::Enter => { + // Synthesize - use selected proposal + if let Some(proposal) = + self.proposals.get(self.current_proposal_index) + { + self.final_prompt = proposal.clone(); + self.mode = AppMode::Complete; + } + } + KeyCode::Char('e') => { + // Edit selected proposal + self.mode = AppMode::Revising; + self.edit_buffer.clear(); + } + KeyCode::Esc => { + // Cancel and return to normal mode + self.mode = AppMode::Normal; + self.proposals.clear(); + self.current_proposal_index = 0; + } + _ => {} + }, + AppMode::Revising => match key.code { + KeyCode::Enter => { + self.handle_revision(); + } + KeyCode::Esc => { + self.mode = AppMode::Orchestrating; + self.edit_buffer.clear(); + } + KeyCode::Backspace => { + self.edit_buffer.pop(); + } + KeyCode::Char(c) => { + self.edit_buffer.push(c); + } + _ => {} + }, + AppMode::Complete => match key.code { + KeyCode::Enter | KeyCode::Esc => { + self.mode = AppMode::Normal; + self.final_prompt.clear(); + self.proposals.clear(); + self.current_proposal_index = 0; + } + _ => {} + }, } } } @@ -602,15 +829,44 @@ impl App { self.handle_slash_command(&message); } else { // Handle regular chat message - // TODO: Implement actual chat processing - // For now, just clear the input - println!("Chat message: {}", message); + self.agent_status = AgentStatus::Orchestrating; + let settings = self.settings.clone(); + let tx = self.agent_tx.clone(); + tokio::spawn(async move { + let result = orchestrator::generate_proposals( + &message, + &settings.endpoint, + &settings.local_model, + ) + .await; + let _ = tx.send(AgentMessage::ProposalsGenerated(result)); + }); } // Clear input after processing self.edit_buffer.clear(); } + fn handle_revision(&mut self) { + let revision = 
self.edit_buffer.trim().to_string(); + if let Some(current_proposal) = self.proposals.get(self.current_proposal_index).cloned() { + self.agent_status = AgentStatus::Orchestrating; + let settings = self.settings.clone(); + let tx = self.agent_tx.clone(); + tokio::spawn(async move { + let result = orchestrator::revise_proposal( + &current_proposal, + &revision, + &settings.endpoint, + &settings.local_model, + ) + .await; + let _ = tx.send(AgentMessage::RevisedProposalGenerated(result)); + }); + } + self.edit_buffer.clear(); + } + fn handle_slash_command(&mut self, command: &str) { match command { "/setting" | "/settings" => { diff --git a/crates/agentic-tui/src/ui/chat.rs b/crates/agentic-tui/src/ui/chat.rs index 13eeef7..6999ec0 100644 --- a/crates/agentic-tui/src/ui/chat.rs +++ b/crates/agentic-tui/src/ui/chat.rs @@ -117,30 +117,26 @@ pub fn render_chat( frame.render_widget(logo_paragraph, vertical_chunks[1]); - // Status-based message + // Status-based message - show one error at a time, prioritizing local > cloud > api key let (status_text, status_style) = match agent_status { - AgentStatus::Ready => ( - "Press [ENTER] to Start Ruixen", - theme.ratatui_style(Element::Accent), - ), AgentStatus::LocalEndpointError => ( - "⚠️ Local endpoint error - Check settings [S]", + "Local not ready - see [S]ettings", theme.ratatui_style(Element::Warning), ), - AgentStatus::CloudEndpointError => ( - "⚠️ Cloud endpoint error - Check settings [S]", + AgentStatus::CheckLocalModel => ( + "Local not ready - see [S]ettings", theme.ratatui_style(Element::Warning), ), - AgentStatus::CheckLocalModel => ( - "⚠️ Local model not configured - Check settings [S]", + AgentStatus::CloudEndpointError => ( + "Cloud not ready - see [S]ettings", theme.ratatui_style(Element::Warning), ), AgentStatus::CheckCloudModel => ( - "⚠️ Cloud model not configured - Check settings [S]", + "Cloud not ready - see [S]ettings", theme.ratatui_style(Element::Warning), ), AgentStatus::CheckApiKey => ( - "⚠️ API key not 
configured - Check settings [S]", + "API Key not ready - see [S]ettings", theme.ratatui_style(Element::Warning), ), AgentStatus::ValidatingLocal => ( @@ -152,8 +148,8 @@ pub fn render_chat( theme.ratatui_style(Element::Info), ), _ => ( - "Press [ENTER] when local and cloud models are ready", - theme.ratatui_style(Element::Inactive), + "Press [ENTER] to start Ruixen", + theme.ratatui_style(Element::Accent), ), }; diff --git a/crates/agentic-tui/src/ui/header.rs b/crates/agentic-tui/src/ui/header.rs index b9dbde9..a1d0988 100644 --- a/crates/agentic-tui/src/ui/header.rs +++ b/crates/agentic-tui/src/ui/header.rs @@ -100,6 +100,9 @@ fn build_smart_status_with_color(status: AgentStatus, settings: &Settings) -> (S local_display, cloud_display ) } + AgentStatus::Orchestrating => { + format!("Ruixen :: [ORCHESTRATING] :: {}", cloud_display) + } }; let color = match status { @@ -113,6 +116,7 @@ fn build_smart_status_with_color(status: AgentStatus, settings: &Settings) -> (S } } AgentStatus::ValidatingLocal | AgentStatus::ValidatingCloud => Color::Yellow, // Testing in progress + AgentStatus::Orchestrating => Color::Cyan, AgentStatus::LocalEndpointError | AgentStatus::CloudEndpointError => Color::Red, // Connection failed _ => Color::Red, // Other validation failed };