From 851fd1ae21e62ba1b80f1910cb56cde8da45b8a3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?N=C3=ADcolas=20Carvalho?= Date: Sun, 8 Mar 2026 17:32:23 -0300 Subject: [PATCH 1/3] replace F3 context push with read_terminal LLM tool Remove the F3 keybinding that manually pushed terminal output to the LLM. Instead, add a `read_terminal` local tool that Claude can call on demand to read the last 50 lines of terminal output. Update the system prompt to instruct Claude to proactively call this tool rather than asking the user to share output. Co-Authored-By: Claude Sonnet 4.6 --- src/llm/anthropic.rs | 9 +++++-- src/llm/mod.rs | 12 ++++++--- src/main.rs | 25 +++++-------------- src/tabs/llm.rs | 59 +++++++++++++++++--------------------------- src/tabs/terminal.rs | 4 +++ 5 files changed, 49 insertions(+), 60 deletions(-) diff --git a/src/llm/anthropic.rs b/src/llm/anthropic.rs index 152bc99..51ce1d4 100644 --- a/src/llm/anthropic.rs +++ b/src/llm/anthropic.rs @@ -144,6 +144,11 @@ fn all_tools() -> Value { }, "required": [] } + }, + { + "name": "read_terminal", + "description": "Read the recent output from the user's terminal. Returns the last lines of captured terminal output. Use this to understand what is currently happening in the SSH session.", + "input_schema": { "type": "object", "properties": {}, "required": [] } } ]) } @@ -317,8 +322,8 @@ impl LLMProvider for AnthropicProvider { // Dispatch by tool name. 
match name.as_str() { - "system_information" => { - debug!("[Anthropic] local tool: system_information"); + "system_information" | "read_terminal" => { + debug!("[Anthropic] local tool: {}", name); return Ok(LLMEvent::LocalTool { id, name, input, assistant_blocks }); } "run_command" => { diff --git a/src/llm/mod.rs b/src/llm/mod.rs index a08fd88..681df9b 100644 --- a/src/llm/mod.rs +++ b/src/llm/mod.rs @@ -154,10 +154,16 @@ pub trait LLMProvider: Send + Sync { pub const DEFAULT_SYSTEM_PROMPT: &str = "\ You are Sheesh, an expert SSH and Linux assistant embedded in a terminal manager. \ You help users understand and manage their remote SSH sessions. \ -When the user shares terminal output, analyse it and provide clear, actionable guidance. \ Prefer concise answers; use shell code blocks for any commands you suggest. \ -You can run commands directly on the user's remote session via the run_command tool — \ -always explain what a command does before proposing to run it."; +You have the following tools available:\n\ +- read_terminal: Read recent output from the user's terminal. \ + Call this proactively whenever the user asks about what is on the screen, \ + what is happening, or any question that requires knowing the current terminal state. \ + Do NOT ask the user to share output — just call the tool.\n\ +- run_command: Execute a shell command on the remote SSH session. 
\ + Always explain what a command does before proposing to run it.\n\ +- system_information: Get SSH connection details for the current session.\n\ +- read_file, list_dir, make_dir, touch_file: File operations on the remote host."; #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(default)] diff --git a/src/main.rs b/src/main.rs index 27a60cc..29eb0c9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -26,7 +26,7 @@ use app::{AppState, ConnectedFocus}; use config::{load_connections, save_connections, ssh_config_path}; use event::Action; use llm::{LLMConfig, build_provider}; -use tabs::{Tab, listing::ListingTab, llm::LLMTab, terminal::{CONTEXT_LINES, TerminalTab}}; +use tabs::{Tab, listing::ListingTab, llm::LLMTab, terminal::TerminalTab}; use ui::{keybindings::render_keybindings, theme::Theme}; /// Captures terminal output produced by a tool-call command and forwards it @@ -90,8 +90,11 @@ impl Sheesh { }; let provider = build_provider(&self.llm_config); + let output_log = terminal.output_log_arc(); self.terminal = Some(terminal); - self.llm = Some(LLMTab::new(provider, self.llm_config.system_prompt.clone(), conn.clone())); + let mut llm = LLMTab::new(provider, self.llm_config.system_prompt.clone(), conn.clone()); + llm.set_terminal_output(output_log); + self.llm = Some(llm); self.state = AppState::Connected { connection_name: name, focus: ConnectedFocus::Terminal, @@ -113,14 +116,6 @@ impl Sheesh { } } - fn send_context_to_llm(&mut self) { - if let (Some(terminal), Some(llm)) = (&self.terminal, &mut self.llm) { - let ctx = terminal.visible_text(CONTEXT_LINES); - let question = std::mem::take(&mut llm.input); - llm.send_with_context(ctx, question); - } - } - fn handle_event(&mut self, event: &crossterm::event::Event) -> bool { use crossterm::event::{KeyCode, KeyEvent}; @@ -140,14 +135,6 @@ impl Sheesh { self.cycle_focus(); return true; } - // F3 — send terminal context to LLM (stay on current panel) - crossterm::event::Event::Key(KeyEvent { - code: KeyCode::F(3), - 
.. - }) => { - self.send_context_to_llm(); - return true; - } // Mouse click — focus the panel that was clicked. // Do NOT return early for the terminal panel so the click also // reaches the terminal handler to start a text selection. @@ -295,7 +282,7 @@ impl Sheesh { let hints: Vec<(&str, &str)> = match &self.state { AppState::Listing => self.listing.key_hints(), AppState::Connected { focus, .. } => { - let mut hints = vec![("F2", "switch panel"), ("F3", "send context")]; + let mut hints = vec![("F2", "switch panel")]; let panel_hints: Vec<(&str, &str)> = match focus { ConnectedFocus::Terminal => self .terminal diff --git a/src/tabs/llm.rs b/src/tabs/llm.rs index 36cfcfc..1426058 100644 --- a/src/tabs/llm.rs +++ b/src/tabs/llm.rs @@ -1,4 +1,4 @@ -use std::sync::{Arc, mpsc}; +use std::sync::{Arc, Mutex, mpsc}; use crossterm::event::{Event, KeyCode, KeyEvent, KeyModifiers, MouseButton, MouseEventKind}; use ratatui::{ @@ -13,18 +13,12 @@ use crate::{ event::Action, llm::{ContentBlock, LLMEvent, LLMProvider, Message, RichMessage, Role, spawn_completion_rich}, ssh::SSHConnection, + tabs::terminal::CONTEXT_LINES, ui::theme::Theme, }; use super::Tab; -/// Display prefix added to messages that include terminal context. -const CONTEXT_DISPLAY_PREFIX: &str = "[terminal context shared]"; -/// Default question used when the user sends context without typing anything. -const CONTEXT_DEFAULT_QUESTION: &str = "What's happening here?"; -/// API prompt template: context block + question. -const CONTEXT_PROMPT_TEMPLATE: &str = "Terminal context:\n```\n{context}\n```\n\n{question}"; - /// (line_index, col) in the flattened history line buffer. type BufPos = (usize, usize); @@ -71,6 +65,8 @@ pub struct LLMTab { connection: SSHConnection, /// Maps each visible chat screen row → (build_lines index, byte offset in that string). last_visual_row_map: Vec<(usize, usize)>, + /// Shared reference to the terminal's raw output log (for the read_terminal tool). 
+ terminal_output: Option<Arc<Mutex<Vec<String>>>>, } impl LLMTab { @@ -103,10 +99,15 @@ impl LLMTab { clipboard: arboard::Clipboard::new().ok(), connection, last_visual_row_map: vec![], + terminal_output: None, rich_history, } } + pub fn set_terminal_output(&mut self, output: Arc<Mutex<Vec<String>>>) { + self.terminal_output = Some(output); + } + /// Poll the channel for completed LLM responses. Call this each render frame. pub fn poll(&mut self) { while let Ok(event) = self.rx.try_recv() { @@ -261,6 +262,20 @@ impl LLMTab { if c.extra_options.is_empty() { "(none)".to_string() } else { c.extra_options.join(", ") }, ) } + "read_terminal" => { + match &self.terminal_output { + None => "Terminal output not available.".to_string(), + Some(log) => { + let log = log.lock().unwrap(); + if log.is_empty() { + "No terminal output captured yet.".to_string() + } else { + let start = log.len().saturating_sub(CONTEXT_LINES); + log[start..].join("") + } + } + } + } other => format!("Unknown local tool: {}", other), } } @@ -302,34 +317,6 @@ impl LLMTab { ); } - /// Prepend terminal context and send. - pub fn send_with_context(&mut self, context: String, question: String) { - if self.waiting { - return; - } - - let question = if question.trim().is_empty() { - CONTEXT_DEFAULT_QUESTION.to_string() - } else { - question - }; - let display = format!("{} {}", CONTEXT_DISPLAY_PREFIX, question); - let api_content = CONTEXT_PROMPT_TEMPLATE - .replace("{context}", &context) - .replace("{question}", &question); - - self.history.push(Message::user(&display)); - self.rich_history.push(RichMessage::user_text(api_content)); - self.waiting = true; - self.scroll_offset = 0; - self.status = "Waiting for response…".into(); - spawn_completion_rich( - Arc::clone(&self.provider), - self.rich_history.clone(), - self.tx.clone(), - ); - } - /// Build the flat list of rendered lines from the message history. fn build_lines(&self) -> Vec<(String, Option