Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
# This needs to sync with aider/help_pats.py

include requirements/requirements.in
include requirements/requirements-dev.in
include requirements/requirements-help.in
include requirements/requirements-playwright.in

global-exclude .DS_Store

recursive-exclude aider/website/examples *
Expand Down
2 changes: 1 addition & 1 deletion aider/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from packaging import version

__version__ = "0.90.5.dev"
__version__ = "0.90.6.dev"
safe_version = __version__

try:
Expand Down
4 changes: 2 additions & 2 deletions aider/coders/agent_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ def _get_agent_config(self):
config["tools_excludelist"] = []

if "include_context_blocks" in config:
self.allowed_context_blocks = set(config["context_blocks"])
self.allowed_context_blocks = set(config["include_context_blocks"])
else:
self.allowed_context_blocks = {
"context_summary",
Expand Down Expand Up @@ -1202,7 +1202,7 @@ async def reply_completed(self):
self.tool_usage_history = []
if self.files_edited_by_tools:
_ = await self.auto_commit(self.files_edited_by_tools)
return True
return False

# Since we are no longer suppressing, the partial_response_content IS the final content.
# We might want to update it to the processed_content (without tool calls) if we don't
Expand Down
8 changes: 8 additions & 0 deletions aider/coders/architect_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,14 @@ async def reply_completed(self):
kwargs["cache_prompts"] = False
kwargs["num_cache_warming_pings"] = 0
kwargs["summarize_from_coder"] = False
kwargs["mcp_servers"] = [] # Empty to skip initialization

coder = await Coder.create(**kwargs)
# Transfer MCP state to avoid re-initialization
coder.mcp_servers = self.mcp_servers
coder.mcp_tools = self.mcp_tools
# Transfer TUI app weak reference
coder.tui = self.tui

new_kwargs = dict(io=self.io, from_coder=self)
new_kwargs.update(kwargs)
Expand Down
6 changes: 5 additions & 1 deletion aider/coders/base_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -2845,7 +2845,11 @@ def add_assistant_reply_to_cur_messages(self):
msg["reasoning_content"] = self.partial_response_reasoning_content

# Only add a message if it's not empty.
if msg is not None:
if msg is not None and (
msg.get("content", None)
or msg.get("tool_calls", None)
or msg.get("function_call", None)
):
self.cur_messages.append(msg)

def get_file_mentions(self, content, ignore_current=False):
Expand Down
39 changes: 27 additions & 12 deletions aider/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -1606,19 +1606,29 @@ async def _generic_chat_command(self, args, edit_format, placeholder=None):

from aider.coders.base_coder import Coder

user_msg = args

original_main_model = self.coder.main_model
original_edit_format = self.coder.edit_format
kwargs = {
"io": self.coder.io,
"from_coder": self.coder,
"edit_format": edit_format,
"summarize_from_coder": False,
"num_cache_warming_pings": 0,
"aider_commit_hashes": self.coder.aider_commit_hashes,
"args": self.coder.args,
}

kwargs["mcp_servers"] = [] # Empty to skip initialization

coder = await Coder.create(**kwargs)
# Transfer MCP state to avoid re-initialization
coder.mcp_servers = self.coder.mcp_servers
coder.mcp_tools = self.coder.mcp_tools
# Transfer TUI app weak reference
coder.tui = self.coder.tui

coder = await Coder.create(
io=self.io,
from_coder=self.coder,
edit_format=edit_format,
summarize_from_coder=False,
num_cache_warming_pings=0,
aider_commit_hashes=self.coder.aider_commit_hashes,
)

user_msg = args
await coder.generate(user_message=user_msg, preproc=False)
self.coder.aider_commit_hashes = coder.aider_commit_hashes

Expand Down Expand Up @@ -1649,7 +1659,7 @@ def get_help_md(self):
res += "\n"
return res

def cmd_voice(self, args):
async def cmd_voice(self, args):
"Record and transcribe voice input"

if not self.voice:
Expand All @@ -1667,14 +1677,19 @@ def cmd_voice(self, args):
return

try:
text = self.voice.record_and_transcribe(None, language=self.voice_language)
self.coder.io.update_spinner("Recording...")
text = await self.voice.record_and_transcribe(None, language=self.voice_language)
except litellm.OpenAIError as err:
self.io.tool_error(f"Unable to use OpenAI whisper model: {err}")
return

if text:
self.io.placeholder = text

if self.coder.tui and self.coder.tui():
self.coder.tui().set_input_value(text)
self.coder.tui().refresh()

def cmd_paste(self, args):
"""Paste image/text from the clipboard into the chat.\
Optionally provide a name for the image."""
Expand Down
1 change: 1 addition & 0 deletions aider/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -753,6 +753,7 @@ async def recreate_input(self, future=None):
await asyncio.sleep(0)
else:
self.input_task = asyncio.create_task(self.get_input(None, [], [], []))
await asyncio.sleep(0)

async def get_input(
self,
Expand Down
23 changes: 12 additions & 11 deletions aider/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -762,6 +762,7 @@ def get_io(pretty):
# TUI mode - create TUI-specific IO
output_queue = None
input_queue = None
pre_init_io = get_io(args.pretty)
if args.tui or (args.tui is None and not args.linear_output):
try:
from aider.tui import create_tui_io
Expand All @@ -776,7 +777,7 @@ def get_io(pretty):
print(f"Import error: {e}")
sys.exit(1)
else:
io = get_io(args.pretty)
io = pre_init_io

# Only do CLI-specific initialization if not in TUI mode
if not args.tui:
Expand Down Expand Up @@ -1260,35 +1261,35 @@ def apply_model_overrides(model_name):
)

if args.show_model_warnings:
problem = await models.sanity_check_models(io, main_model)
problem = await models.sanity_check_models(pre_init_io, main_model)
if problem:
io.tool_output("You can skip this check with --no-show-model-warnings")
pre_init_io.tool_output("You can skip this check with --no-show-model-warnings")

try:
await io.offer_url(
await pre_init_io.offer_url(
urls.model_warnings,
"Open documentation url for more info?",
acknowledge=True,
)
io.tool_output()
pre_init_io.tool_output()
except KeyboardInterrupt:
return await graceful_exit(coder, 1)

if args.git:
git_root = await setup_git(git_root, io)
git_root = await setup_git(git_root, pre_init_io)
if args.gitignore:
await check_gitignore(git_root, io)
await check_gitignore(git_root, pre_init_io)

except UnknownEditFormat as err:
io.tool_error(str(err))
await io.offer_url(
pre_init_io.tool_error(str(err))
await pre_init_io.offer_url(
urls.edit_formats, "Open documentation about edit formats?", acknowledge=True
)

return await graceful_exit(None, 1)

except ValueError as err:
io.tool_error(str(err))
pre_init_io.tool_error(str(err))

return await graceful_exit(None, 1)

Expand Down Expand Up @@ -1437,11 +1438,11 @@ def apply_model_overrides(model_name):
except Exception:
# Don't show errors for auto-load to avoid interrupting the user experience
pass

# TUI mode - launch Textual interface
if args.tui:
from aider.tui import launch_tui

del pre_init_io
return_code = await launch_tui(coder, output_queue, input_queue, args)
return await graceful_exit(coder, return_code)

Expand Down
8 changes: 8 additions & 0 deletions aider/sendchat.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,14 @@ def ensure_alternating_roles(messages):
msg = messages[i]
role = msg.get("role")

if (
role == "assistant"
and not msg.get("content", None)
and not msg.get("tool_calls", None)
and not msg.get("function_call", None)
):
msg["content"] = "(empty response)"

# Handle tool call sequences atomically
if role == "assistant" and "tool_calls" in msg and msg["tool_calls"]:
# Start of tool sequence - collect all related messages
Expand Down
7 changes: 6 additions & 1 deletion aider/tui/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -450,12 +450,17 @@ def on_input_area_submit(self, message: InputArea.Submit):

# Update footer to show processing
footer = self.query_one(AiderFooter)
footer.start_spinner("Thinking...")
footer.start_spinner("Processing...")

self.update_key_hints(generating=True)

self.input_queue.put({"text": user_input})

def set_input_value(self, text) -> None:
"""Set the input widget's value to the given text."""
input_area = self.query_one("#input", InputArea)
input_area.value = text

def action_focus_input(self) -> None:
"""Find the input widget and set focus to it."""
input_area = self.query_one("#input", InputArea)
Expand Down
14 changes: 14 additions & 0 deletions aider/tui/io.py
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,9 @@ def stop_spinner(self):
}
)

def interrupt_input(self):
self.interrupted = True

async def get_input(
self,
root,
Expand All @@ -278,6 +281,8 @@ async def get_input(
Returns:
User input string
"""
self.interrupted = False

# Signal TUI that we're ready for input
command_names = commands.get_commands() if commands else []

Expand Down Expand Up @@ -308,6 +313,15 @@ async def get_input(
# Wait for input from TUI (blocking in async context)
# We need to poll the queue since it's not async
while True:
if hasattr(self, "file_watcher") and self.file_watcher:
if not self.file_watcher.is_running:
self.file_watcher.start()

# Check if we were interrupted by a file change
if self.interrupted:
cmd = self.file_watcher.process_changes()
return cmd

try:
# Non-blocking get with timeout
import queue
Expand Down
Loading
Loading