// minLinesForReference is the minimum number of lines a paste must span
// before it is collapsed into a reference instead of inserted verbatim.
const minLinesForReference = 3

// pasteRefPrefix is the leading text of every reference token produced by
// FormatRef; ExpandRefs scans for it when substituting content back in.
// The two must stay in sync.
const pasteRefPrefix = "[Pasted text #"

// PasteStore keeps the full text of long pastes keyed by a numeric ID so the
// TUI can display a compact reference while the agent still receives the
// complete content. All methods are safe for concurrent use.
type PasteStore struct {
	mu       sync.RWMutex
	contents map[int]string // id -> full pasted content
	nextID   int            // next ID handed out by Store; starts at 1
}

// NewPasteStore returns an empty store whose first issued ID is 1.
func NewPasteStore() *PasteStore {
	return &PasteStore{
		contents: make(map[int]string),
		nextID:   1,
	}
}

// Store saves content and returns its reference ID. It returns 0 — and
// stores nothing — when the content is shorter than minLinesForReference
// lines.
func (ps *PasteStore) Store(content string) int {
	if CountLines(content) < minLinesForReference {
		return 0
	}

	ps.mu.Lock()
	defer ps.mu.Unlock()

	id := ps.nextID
	ps.nextID++
	ps.contents[id] = content
	return id
}

// StoreAndFormat stores content and returns the compact reference string for
// display. Content too short to collapse is returned unchanged.
func (ps *PasteStore) StoreAndFormat(content string) string {
	numLines := CountLines(content)
	if id := ps.Store(content); id != 0 {
		return FormatRef(id, numLines)
	}
	return content
}

// Get returns the content stored under id and whether it exists.
func (ps *PasteStore) Get(id int) (string, bool) {
	ps.mu.RLock()
	defer ps.mu.RUnlock()

	content, ok := ps.contents[id]
	return content, ok
}

// Clear drops all stored content and resets the ID counter to 1.
func (ps *PasteStore) Clear() {
	ps.mu.Lock()
	defer ps.mu.Unlock()

	ps.contents = make(map[int]string)
	ps.nextID = 1
}

// FormatRef renders the display token for a stored paste, e.g.
// "[Pasted text #1 +10 lines]", or "[Pasted text #1]" when numLines is 0.
func FormatRef(id int, numLines int) string {
	if numLines == 0 {
		return fmt.Sprintf("%s%d]", pasteRefPrefix, id)
	}
	return fmt.Sprintf("%s%d +%d lines]", pasteRefPrefix, id, numLines)
}

// NormalizeLineEndings converts \r\n and bare \r to \n so that line counting
// and display behave identically regardless of the paste's source platform.
func NormalizeLineEndings(s string) string {
	s = strings.ReplaceAll(s, "\r\n", "\n")
	s = strings.ReplaceAll(s, "\r", "\n")
	return s
}

// CountLines returns the number of lines in content: 0 for the empty string,
// otherwise newline count + 1.
func CountLines(content string) int {
	if content == "" {
		return 0
	}
	return strings.Count(content, "\n") + 1
}

// ExpandRefs replaces every paste reference produced by FormatRef with its
// stored content; it should be called before sending the prompt to the agent.
// Unknown or malformed references are left untouched.
//
// This is a single deterministic left-to-right scan: it expands ALL
// occurrences of a reference (the previous per-ID strings.Replace with n=1
// only expanded the first) and never re-scans substituted content, so pasted
// text that itself contains a reference-shaped string cannot trigger
// nondeterministic nested expansion.
func (ps *PasteStore) ExpandRefs(input string) string {
	if !strings.Contains(input, pasteRefPrefix) {
		return input // fast path: nothing that could be a reference
	}

	ps.mu.RLock()
	defer ps.mu.RUnlock()

	var out strings.Builder
	out.Grow(len(input))
	rest := input
	for {
		start := strings.Index(rest, pasteRefPrefix)
		if start < 0 {
			out.WriteString(rest)
			return out.String()
		}
		out.WriteString(rest[:start])
		rest = rest[start:]

		end := strings.IndexByte(rest, ']')
		if end < 0 {
			// Unterminated token: keep the remainder verbatim.
			out.WriteString(rest)
			return out.String()
		}
		token := rest[:end+1]
		rest = rest[end+1:]

		if content, ok := ps.resolveRef(token); ok {
			out.WriteString(content)
		} else {
			out.WriteString(token) // unknown ID or malformed — leave as-is
		}
	}
}

// resolveRef parses a candidate reference token — "[Pasted text #3]" or
// "[Pasted text #3 +10 lines]" — and looks up its content. The caller must
// hold ps.mu (read lock suffices). It returns false for malformed tokens or
// unknown IDs.
func (ps *PasteStore) resolveRef(token string) (string, bool) {
	body := token[len(pasteRefPrefix) : len(token)-1] // e.g. "3 +10 lines" or "3"
	id, i := 0, 0
	for i < len(body) && body[i] >= '0' && body[i] <= '9' {
		id = id*10 + int(body[i]-'0')
		i++
	}
	if i == 0 {
		return "", false // no digits after '#'
	}
	if tail := body[i:]; tail != "" && !strings.HasPrefix(tail, " +") {
		return "", false // only the " +N lines" suffix form is accepted
	}
	content, ok := ps.contents[id]
	return content, ok
}
ta.CharLimit = 0 ta.ShowLineNumbers = false - ta.SetHeight(1) + ta.DynamicHeight = true + ta.MinHeight = 1 + ta.MaxHeight = defaultMaxTextareaLines ta.Prompt = "> " st := ta.Styles() st.Focused.CursorLine = lipgloss.NewStyle() @@ -364,6 +368,7 @@ func NewModel(hasPrompt bool, pwd string, todoStore *tools.TodoStore) Model { pwd: pwd, history: loadHistory(), todoStore: todoStore, + pasteStore: NewPasteStore(), lines: initialLines, envLabel: "Local", approvalMode: ModeManual, // Default to manual approval mode @@ -447,20 +452,14 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen switch msg := msg.(type) { case tea.PasteMsg: - // Forward bracketed paste (Ctrl+Shift+V / right-click paste) to textarea if m.inputActive() { - var cmd tea.Cmd - m.textarea, cmd = m.textarea.Update(msg) - cmds = append(cmds, cmd) + return m.handlePasteContent(NormalizeLineEndings(msg.Content)) } return m, tea.Batch(cmds...) case tea.ClipboardMsg: - // OSC52 clipboard read result — forward as paste to textarea if m.inputActive() && msg.Content != "" { - var cmd tea.Cmd - m.textarea, cmd = m.textarea.Update(tea.PasteMsg{Content: msg.Content}) - cmds = append(cmds, cmd) + return m.handlePasteContent(NormalizeLineEndings(msg.Content)) } return m, tea.Batch(cmds...) 
@@ -1124,7 +1123,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen m.cmdSuggestionActive = false m.cmdSuggestions = nil m.cmdSuggestionIndex = 0 - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) // Re-evaluate suggestions after setting value (may show new filtered list) m.updateSuggestions() @@ -1150,7 +1149,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen m.cmdSuggestionActive = false m.cmdSuggestions = nil m.cmdSuggestionIndex = 0 - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) // Re-evaluate: exact match clears suggestions, partial shows new list m.updateSuggestions() @@ -1161,6 +1160,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen } prompt := strings.TrimSpace(m.textarea.Value()) if prompt != "" { + // Expand paste references to full content for the agent + actualPrompt := m.pasteStore.ExpandRefs(prompt) appendHistory(prompt) if len(m.history) == 0 || m.history[len(m.history)-1] != prompt { m.history = append(m.history, prompt) @@ -1177,7 +1178,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen // Team: route input to viewed teammate if m.teamState.ViewMode == TeamViewTeammate && m.teamState.ViewingAgent != "" { - m.teamState.Manager.EnqueueUserMessage(m.teamState.ViewingAgent, prompt) + m.teamState.Manager.EnqueueUserMessage(m.teamState.ViewingAgent, actualPrompt) + // prompt already contains compact references from paste-time m.lines = append(m.lines, userPromptStyle.Render("> "+prompt)) m.refreshViewport() return m, tea.Batch(cmds...) 
@@ -1233,7 +1235,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen } if !m.agentDone && m.thinking { - m.pendingPrompts = append(m.pendingPrompts, prompt) + m.pendingPrompts = append(m.pendingPrompts, actualPrompt) + // prompt already contains compact references from paste-time m.lines = append(m.lines, userPromptStyle.Render("> "+prompt+" (queued)")) if m.ready { m.viewport.SetHeight(m.calcViewportHeight(true)) @@ -1249,12 +1252,12 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen m.promptStartTime = time.Now() // In Plan mode, send prompt directly (agent already has plan system prompt + read-only tools). - actualPrompt := prompt modePrefix := ">" if m.agentMode == ModePlanning { modePrefix = "📐" } + // prompt already contains compact references from paste-time m.lines = append(m.lines, "") m.lines = append(m.lines, userPromptStyle.Render(modePrefix+" "+prompt)) if m.ready { @@ -1273,7 +1276,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen var cmd tea.Cmd m.textarea, cmd = m.textarea.Update(tea.KeyPressMsg{Code: tea.KeyEnter}) cmds = append(cmds, cmd) - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) if m.ready { m.viewport.SetHeight(m.calcViewportHeight(m.inputActive())) @@ -1287,11 +1290,17 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen } return m, tea.Batch(cmds...) } + // Smart navigation: move cursor up within textarea first; + // only trigger history when already on the first line. + if m.textarea.Line() > 0 { + m.textarea.CursorUp() + return m, tea.Batch(cmds...) 
+ } if m.historyIndex > 0 { m.historyIndex-- m.textarea.SetValue(m.history[m.historyIndex]) m.textarea.CursorEnd() - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) m.updateSuggestions() if m.ready { @@ -1307,6 +1316,12 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen } return m, tea.Batch(cmds...) } + // Smart navigation: move cursor down within textarea first; + // only trigger history when already on the last line. + if m.textarea.Line() < m.textarea.LineCount()-1 { + m.textarea.CursorDown() + return m, tea.Batch(cmds...) + } if m.historyIndex < len(m.history)-1 { m.historyIndex++ m.textarea.SetValue(m.history[m.historyIndex]) @@ -1315,7 +1330,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen m.historyIndex++ m.textarea.SetValue("") } - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) m.updateSuggestions() if m.ready { @@ -1374,7 +1389,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen var cmd tea.Cmd m.textarea, cmd = m.textarea.Update(msg) cmds = append(cmds, cmd) - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) m.updateSuggestions() if m.ready { @@ -1454,6 +1469,12 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen } m.textarea.SetWidth(inputWidth) + // Update textarea max height based on new terminal dimensions + newMaxHeight := calcMaxTextareaLines(m.height) + m.textarea.MaxHeight = newMaxHeight + m.textareaLines = recalcLines(m.textarea.Value(), newMaxHeight) + m.textarea.SetHeight(m.textareaLines) + vpH := m.calcViewportHeight(m.inputActive()) if !m.ready { m.viewport = 
viewport.New(viewport.WithWidth(mainWidth), viewport.WithHeight(vpH)) @@ -1532,7 +1553,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen // Replace input area with the received text m.textarea.SetValue(msg.Val) m.textarea.CursorEnd() - m.textareaLines = recalcLines(m.textarea.Value()) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) m.textarea.SetHeight(m.textareaLines) if m.ready { m.viewport.SetHeight(m.calcViewportHeight(m.inputActive())) @@ -1541,6 +1562,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen // Submit current input content to the agent prompt := strings.TrimSpace(m.textarea.Value()) if prompt != "" { + // Expand paste references to full content for the agent + actualPrompt := m.pasteStore.ExpandRefs(prompt) appendHistory(prompt) if len(m.history) == 0 || m.history[len(m.history)-1] != prompt { m.history = append(m.history, prompt) @@ -1559,7 +1582,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen } if !m.agentDone && m.thinking { - m.pendingPrompts = append(m.pendingPrompts, prompt) + m.pendingPrompts = append(m.pendingPrompts, actualPrompt) + // prompt already contains compact references from paste-time m.lines = append(m.lines, userPromptStyle.Render("> "+prompt+" (queued)")) m.refreshViewport() return m, tea.Batch(cmds...) 
@@ -1575,6 +1599,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen modePrefix = "📐" } + // prompt already contains compact references from paste-time m.lines = append(m.lines, "") m.lines = append(m.lines, userPromptStyle.Render(modePrefix+" "+prompt)) if m.ready { @@ -1583,7 +1608,7 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen m.viewport.GotoBottom() } cmds = append(cmds, func() tea.Msg { - return PromptSubmitMsg{Prompt: prompt} + return PromptSubmitMsg{Prompt: actualPrompt} }) cmds = append(cmds, m.spinner.Tick) } @@ -1641,7 +1666,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen switch e.Type { case string(session.EntryUser): m.lines = append(m.lines, "") - m.lines = append(m.lines, userPromptStyle.Render("> "+e.Content)) + displayContent := m.pasteStore.StoreAndFormat(NormalizeLineEndings(e.Content)) + m.lines = append(m.lines, userPromptStyle.Render("> "+displayContent)) case string(session.EntryAssistant): if e.Content != "" { rendered := e.Content @@ -1821,7 +1847,8 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen case UserPromptMsg: m.lines = append(m.lines, "") - m.lines = append(m.lines, userPromptStyle.Render("> "+sanitize(msg.Prompt))) + displayPrompt := m.pasteStore.StoreAndFormat(NormalizeLineEndings(sanitize(msg.Prompt))) + m.lines = append(m.lines, userPromptStyle.Render("> "+displayPrompt)) m.refreshViewport() case AgentTextMsg: @@ -2224,15 +2251,52 @@ func (m Model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { //nolint:funlen return m, tea.Batch(cmds...) } -const maxTextareaLines = 5 +const ( + defaultMaxTextareaLines = 5 + minTextareaLines = 3 + maxTextareaLinesCap = 20 +) + +// calcMaxTextareaLines dynamically computes the max textarea height based on +// terminal height. It returns a value between minTextareaLines and +// maxTextareaLinesCap, capped at 40% of the terminal height. 
+func calcMaxTextareaLines(termHeight int) int { + if termHeight <= 0 { + return defaultMaxTextareaLines + } + // Use up to 40% of terminal height for the input area, but keep within bounds. + n := termHeight * 2 / 5 + if n < minTextareaLines { + n = minTextareaLines + } + if n > maxTextareaLinesCap { + n = maxTextareaLinesCap + } + return n +} + +// handlePasteContent processes normalized paste content: stores long pastes +// as a reference in PasteStore, inserts the appropriate text into the textarea, +// and recalculates textarea/viewport height. +func (m Model) handlePasteContent(content string) (tea.Model, tea.Cmd) { + display := m.pasteStore.StoreAndFormat(content) + var cmd tea.Cmd + m.textarea, cmd = m.textarea.Update(tea.PasteMsg{Content: display}) + m.textareaLines = recalcLines(m.textarea.Value(), calcMaxTextareaLines(m.height)) + m.textarea.SetHeight(m.textareaLines) + if m.ready { + m.viewport.SetHeight(m.calcViewportHeight(m.inputActive())) + } + return m, cmd +} -func recalcLines(s string) int { +func recalcLines(s string, maxLines int) int { n := strings.Count(s, "\n") + 1 if n < 1 { n = 1 } - if n > maxTextareaLines { - n = maxTextareaLines + if n > maxLines { + n = maxLines } return n }