Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 80 additions & 0 deletions crates/agentic-core/src/cloud.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
use reqwest::Client;
use serde::{Deserialize, Serialize};
use std::time::Duration;

/// JSON request body for the OpenRouter chat-completions endpoint.
#[derive(Serialize)]
struct OpenRouterRequest {
    // Model identifier (an OpenRouter model slug) to route the request to.
    model: String,
    // OpenAI-style message list; this client sends a single user message.
    messages: Vec<ChatMessage>,
    // Upper bound on tokens in the generated reply.
    max_tokens: u32,
}

/// One entry in the chat-completions `messages` array.
#[derive(Serialize)]
struct ChatMessage {
    // Speaker role; this client only ever sends "user".
    role: String,
    // The message text.
    content: String,
}

/// Top-level JSON response from the chat-completions endpoint.
/// Only the fields this client reads are modeled; unknown fields are ignored.
#[derive(Deserialize)]
struct OpenRouterResponse {
    // Candidate completions; the first one is used.
    choices: Vec<Choice>,
}

/// A single completion candidate in the response `choices` array.
#[derive(Deserialize)]
struct Choice {
    // The assistant message produced for this choice.
    message: Message,
}

/// The assistant message payload inside a response choice.
#[derive(Deserialize)]
struct Message {
    // The generated reply text returned to the caller.
    content: String,
}

/// Sends `prompt` to the OpenRouter chat-completions API using `model` and
/// returns the assistant's reply text.
///
/// The prompt is wrapped with an instruction asking for a concise response,
/// and `max_tokens` is capped at 1024 to keep replies short.
///
/// # Errors
///
/// Returns an error if the HTTP client cannot be built, the request fails or
/// exceeds the 30-second timeout, the API responds with a non-success status
/// (the status and response body are included in the message), the response
/// body cannot be deserialized, or the response contains no choices.
pub async fn call_cloud_model(
    api_key: &str,
    model: &str,
    prompt: &str,
) -> Result<String, anyhow::Error> {
    let client = Client::builder().timeout(Duration::from_secs(30)).build()?;

    // Optimize prompt for concise responses.
    let optimized_prompt = format!(
        "Please provide a concise, well-structured response to this inquiry. Keep it informative but focused:\n\n{}",
        prompt
    );

    let request_body = OpenRouterRequest {
        model: model.to_string(),
        messages: vec![ChatMessage {
            role: "user".to_string(),
            content: optimized_prompt,
        }],
        max_tokens: 1024, // Reduced from 2048 for more concise responses
    };

    let response = client
        .post("https://openrouter.ai/api/v1/chat/completions")
        .header("Authorization", format!("Bearer {}", api_key))
        .header("Content-Type", "application/json")
        .json(&request_body)
        .send()
        .await?;

    if !response.status().is_success() {
        let status = response.status();
        // Best-effort read of the error body; fall back to an empty string.
        let error_text = response.text().await.unwrap_or_default();
        return Err(anyhow::anyhow!(
            "OpenRouter API error {}: {}",
            status,
            error_text
        ));
    }

    let openrouter_response: OpenRouterResponse = response.json().await?;

    // Take ownership of the first choice instead of `.first()` + `.clone()`,
    // avoiding an unnecessary copy of the reply string; the error is built
    // lazily only when no choices came back.
    openrouter_response
        .choices
        .into_iter()
        .next()
        .map(|choice| choice.message.content)
        .ok_or_else(|| anyhow::anyhow!("No response choices from OpenRouter API"))
}
1 change: 1 addition & 0 deletions crates/agentic-core/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
//! - `settings`: Application configuration management
//! - `theme`: UI theming system

pub mod cloud;
pub mod models;
pub mod orchestrator;
pub mod settings;
Expand Down
26 changes: 12 additions & 14 deletions crates/agentic-core/src/orchestrator.rs
Original file line number Diff line number Diff line change
@@ -1,27 +1,25 @@
use crate::models::call_local_model;
use serde::Deserialize;

const ORCHESTRATOR_PROMPT: &str = r#"You are Ruixen, an inquisitive AI partner. Your job is to analyze the user's request and deconstruct it into three distinct lines of inquiry.
const ORCHESTRATOR_PROMPT: &str = r#"You are Ruixen, an inquisitive AI partner.

**Your Persona and Tone:**
- Your tone should be that of a collaborative partner.
- Each proposal should have a context statement followed by a curious question.
- Use phrases like "I wonder..." or "I'm wondering if..." for questions.
**Your Task:**
Generate 3 concise proposals about this query: "{query}"

**The Query to Explore:**
"{query}"
Each proposal must have TWO parts separated by a dash:
1. A brief context statement (1-2 sentences max)
2. A curious question starting with "I wonder" or "I'm wondering"

**Output Format:**
Generate exactly 3 proposals. Each proposal should be 2 sentences: a context statement followed by a curious question. Use a dash to separate them like this pattern:
Keep each proposal under 3 lines when displayed. Be thoughtful but concise.

"Context statement here - I wonder about this question?"
**Format:** Brief context - I wonder question?

Your response must be valid JSON:
**Output Format:**
{
"proposals": [
"First context statement - I wonder about this?",
"Second context statement - I'm wondering if that?",
"Third context statement - I wonder about something else?"
"Brief context about the topic - I wonder about this specific aspect?",
"Another brief context - I'm wondering if this related thing?",
"Third brief context - I wonder about this other angle?"
]
}
"#;
Expand Down
147 changes: 134 additions & 13 deletions crates/agentic-tui/src/ui/app.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ use super::{
settings_modal::render_settings_modal,
};
use agentic_core::{
cloud,
models::{ModelValidator, OllamaModel, OpenRouterModel},
orchestrator,
settings::{Settings, ValidationError},
Expand Down Expand Up @@ -33,6 +34,7 @@ pub enum AppMode {
Orchestrating,
Revising,
Complete,
CoachingTip,
// TODO: Add About mode
}

Expand All @@ -48,6 +50,8 @@ pub enum AgentStatus {
LocalEndpointError,
CloudEndpointError,
Orchestrating,
Searching,
Complete,
}

#[derive(Debug)]
Expand All @@ -62,6 +66,7 @@ pub enum ValidationMessage {
pub enum AgentMessage {
ProposalsGenerated(Result<Vec<String>, anyhow::Error>),
RevisedProposalGenerated(Result<String, anyhow::Error>),
CloudSynthesisComplete(Result<String, anyhow::Error>),
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
Expand Down Expand Up @@ -118,6 +123,8 @@ pub struct App {
proposals: Vec<String>,
current_proposal_index: usize,
final_prompt: String,
cloud_response: String,
synthesis_scroll: u16,
}

impl App {
Expand All @@ -143,6 +150,8 @@ impl App {
proposals: Vec::new(),
current_proposal_index: 0,
final_prompt: String::new(),
cloud_response: String::new(),
synthesis_scroll: 0,
}
}

Expand Down Expand Up @@ -198,14 +207,14 @@ impl App {
let prefix = if is_selected { "> " } else { " " };
let number = format!("{}. ", i + 1);

// Split proposal into sentences (max 2) and wrap
let sentences: Vec<&str> = proposal.split(". ").take(2).collect();

let proposal_text = if sentences.len() > 1 {
format!("{} {}", sentences[0], sentences.get(1).unwrap_or(&""))
} else {
proposal.clone()
};
// Clean up the proposal text - remove template artifacts
let proposal_text = proposal
.replace("Context statement: ", "")
.replace("Another context: ", "")
.replace("Third context: ", "")
.replace("Context statement - ", "")
.replace("Another context - ", "")
.replace("Third context - ", "");

let style = if is_selected {
self.theme.ratatui_style(Element::Accent)
Expand Down Expand Up @@ -238,6 +247,55 @@ impl App {
frame.render_widget(footer, chunks[2]);
}

/// Draws the "Coaching Tip" modal: a bordered box with a centered body
/// explaining why the query failed and a footer with the dismissal hint.
fn render_coaching_tip_modal(&self, frame: &mut ratatui::Frame, area: Rect) {
    use ratatui::{prelude::Alignment, text::Line, widgets::Paragraph};

    // Bordered container for the whole modal.
    let container = Block::default()
        .title(" Coaching Tip ")
        .borders(Borders::ALL)
        .style(self.theme.ratatui_style(Element::Active));

    let content_area = container.inner(area);
    frame.render_widget(container, area);

    // Vertical split: flexible body on top, three-row footer below.
    let sections = Layout::default()
        .direction(Direction::Vertical)
        .constraints([
            Constraint::Min(5),    // Main message (flexible)
            Constraint::Length(3), // Tips footer
        ])
        .split(content_area);

    // Body copy: the problem statement plus concrete rephrasing tips.
    let body_lines = vec![
        Line::from(""),
        Line::from("Ruixen is having a tough time with this abstract query."),
        Line::from(""),
        Line::from(":: Smaller local models work best with clear and concrete questions."),
        Line::from(""),
        Line::from(":: Try a more direct question, add specific context, or break"),
        Line::from(" the query down into smaller steps."),
        Line::from(""),
    ];

    let body = Paragraph::new(body_lines)
        .alignment(Alignment::Center)
        .style(self.theme.ratatui_style(Element::Text))
        .wrap(Wrap { trim: true });
    frame.render_widget(body, sections[0]);

    // Footer telling the user how to dismiss the modal.
    let hint = Paragraph::new("Press [ESC] to return.")
        .alignment(Alignment::Center)
        .style(self.theme.ratatui_style(Element::Inactive))
        .wrap(Wrap { trim: true });
    frame.render_widget(hint, sections[1]);
}

pub async fn run(&mut self, terminal: &mut Terminal<CrosstermBackend<Stdout>>) -> Result<()> {
while !self.should_quit {
self.draw(terminal)?;
Expand Down Expand Up @@ -391,11 +449,31 @@ impl App {
);
frame.render_widget(Clear, modal_area);
self.render_synthesize_modal(frame, modal_area);
} else if self.mode == AppMode::CoachingTip {
// Render the Coaching Tip modal
let size = frame.size();
let modal_width = (((size.width as f32) * 0.7).round() as u16)
.clamp(50, 70)
.min(size.width);
let modal_height = (((size.height as f32) * 0.4).round() as u16)
.clamp(10, 15)
.min(size.height);
let modal_area = Rect::new(
(size.width.saturating_sub(modal_width)) / 2,
(size.height.saturating_sub(modal_height)) / 2,
modal_width,
modal_height,
);
frame.render_widget(Clear, modal_area);
self.render_coaching_tip_modal(frame, modal_area);
} else if self.mode == AppMode::Complete {
let block = Block::default().title("Final Prompt").borders(Borders::ALL);
let paragraph = Paragraph::new(self.final_prompt.as_str())
let block = Block::default()
.title("Synthesis Complete")
.borders(Borders::ALL);
let paragraph = Paragraph::new(self.cloud_response.as_str())
.block(block)
.wrap(Wrap { trim: true });
.wrap(Wrap { trim: true })
.scroll((self.synthesis_scroll, 0));
frame.render_widget(paragraph, app_chunks[1]);
} else {
render_chat(
Expand Down Expand Up @@ -516,6 +594,8 @@ impl App {
self.agent_status = AgentStatus::Ready;
}
AgentMessage::ProposalsGenerated(Err(_e)) => {
// Show coaching tip instead of just failing silently
self.mode = AppMode::CoachingTip;
self.agent_status = AgentStatus::Ready;
}
AgentMessage::RevisedProposalGenerated(Ok(proposal)) => {
Expand All @@ -527,6 +607,16 @@ impl App {
// TODO: Set error state and display to user
self.agent_status = AgentStatus::Ready;
}
AgentMessage::CloudSynthesisComplete(Ok(response)) => {
self.cloud_response = response;
self.mode = AppMode::Complete;
self.agent_status = AgentStatus::Complete;
}
AgentMessage::CloudSynthesisComplete(Err(_e)) => {
// Show coaching tip for cloud API failures
self.mode = AppMode::CoachingTip;
self.agent_status = AgentStatus::Ready;
}
}
}

Expand Down Expand Up @@ -768,12 +858,12 @@ impl App {
}
}
KeyCode::Enter => {
// Synthesize - use selected proposal
// Synthesize - send proposal to cloud for synthesis
if let Some(proposal) =
self.proposals.get(self.current_proposal_index)
{
self.final_prompt = proposal.clone();
self.mode = AppMode::Complete;
self.handle_cloud_synthesis();
}
}
KeyCode::Char('e') => {
Expand Down Expand Up @@ -806,11 +896,27 @@ impl App {
_ => {}
},
AppMode::Complete => match key.code {
KeyCode::Up => {
self.synthesis_scroll = self.synthesis_scroll.saturating_sub(1);
}
KeyCode::Down => {
self.synthesis_scroll = self.synthesis_scroll.saturating_add(1);
}
KeyCode::Enter | KeyCode::Esc => {
self.mode = AppMode::Normal;
self.final_prompt.clear();
self.proposals.clear();
self.current_proposal_index = 0;
self.cloud_response.clear();
self.synthesis_scroll = 0;
self.agent_status = AgentStatus::Ready;
}
_ => {}
},
AppMode::CoachingTip => match key.code {
KeyCode::Enter | KeyCode::Esc => {
// Return to chat mode to try again
self.mode = AppMode::Chat;
}
_ => {}
},
Expand Down Expand Up @@ -867,6 +973,21 @@ impl App {
self.edit_buffer.clear();
}

/// Kicks off an asynchronous cloud synthesis of the selected prompt and
/// reports the outcome back over the agent channel.
fn handle_cloud_synthesis(&mut self) {
    // Surface progress in the status bar while the request is in flight.
    self.agent_status = AgentStatus::Searching;

    // Clone everything the background task needs so it owns its data.
    let (prompt, api_key, model, tx) = (
        self.final_prompt.clone(),
        self.settings.api_key.clone(),
        self.settings.cloud_model.clone(),
        self.agent_tx.clone(),
    );

    tokio::spawn(async move {
        let outcome = cloud::call_cloud_model(&api_key, &model, &prompt).await;
        // The receiver may be gone during shutdown; ignore a failed send.
        let _ = tx.send(AgentMessage::CloudSynthesisComplete(outcome));
    });
}

fn handle_slash_command(&mut self, command: &str) {
match command {
"/setting" | "/settings" => {
Expand Down
Loading