From ffae1fa4c3a5624dc48c4fd870e987c894357ca6 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Thu, 20 Nov 2025 12:24:45 -0500
Subject: [PATCH 01/10] Bump Version
---
aider/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aider/__init__.py b/aider/__init__.py
index c7b30bd3c60..aa833be4964 100644
--- a/aider/__init__.py
+++ b/aider/__init__.py
@@ -1,6 +1,6 @@
from packaging import version
-__version__ = "0.88.24.dev"
+__version__ = "0.88.25.dev"
safe_version = __version__
try:
From a17a830eeb4bb73e33c7c90e9f482a31eb470b98 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Thu, 20 Nov 2025 23:11:18 -0500
Subject: [PATCH 02/10] Make sure repo map list and agent mode respect
 aiderignore file
---
aider/coders/agent_coder.py | 2 +-
aider/coders/base_coder.py | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/aider/coders/agent_coder.py b/aider/coders/agent_coder.py
index 17ee14ac641..596c6c28f6e 100644
--- a/aider/coders/agent_coder.py
+++ b/aider/coders/agent_coder.py
@@ -1915,7 +1915,7 @@ def get_directory_structure(self):
if line.startswith("??"):
# Extract the filename (remove the '?? ' prefix)
untracked_file = line[3:]
- if not self.repo.git_ignored_file(untracked_file):
+ if not self.repo.ignored_file(untracked_file):
untracked_files.append(untracked_file)
except Exception as e:
self.io.tool_warning(f"Error getting untracked files: {str(e)}")
diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py
index 85628b0f6a5..43a316f469b 100755
--- a/aider/coders/base_coder.py
+++ b/aider/coders/base_coder.py
@@ -921,7 +921,7 @@ def _include_in_map(abs_path):
return False
if ".min." in parts[-1]:
return False
- if self.repo.git_ignored_file(abs_path):
+ if self.repo.ignored_file(abs_path):
return False
return True
From 90ac33cb8894e47b1d0d05f26cb32a1d602fce88 Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Thu, 20 Nov 2025 20:33:46 -0800
Subject: [PATCH 03/10] copy
---
README.md | 2 +-
aider/website/assets/sample-analytics.jsonl | 184 +++++++-------
.../website/docs/config/adv-model-settings.md | 233 ++++++++++++++++++
aider/website/docs/config/model-aliases.md | 3 +-
aider/website/docs/faq.md | 18 +-
aider/website/docs/languages.md | 4 +-
aider/website/docs/leaderboards/index.md | 2 +-
aider/website/docs/more/infinite-output.md | 27 ++
aider/website/index.html | 4 +-
9 files changed, 366 insertions(+), 111 deletions(-)
diff --git a/README.md b/README.md
index f2aecb6f69c..e9253d90169 100644
--- a/README.md
+++ b/README.md
@@ -27,7 +27,7 @@ cog.out(text)
+src="https://img.shields.io/badge/📦%20Installs-3.9M-2ecc71?style=flat-square&labelColor=555555"/>
| Model Name | Total Tokens | Percent |
-| gemini/gemini-2.5-pro | 281,824 | 38.5% |
-| gpt-5 | 211,072 | 28.9% |
-| None | 168,988 | 23.1% |
-| o3-pro | 36,620 | 5.0% |
-| gemini/gemini-2.5-flash-lite | 15,470 | 2.1% |
-| gemini/gemini-2.5-flash-lite-preview-06-17 | 11,371 | 1.6% |
-| o3 | 3,915 | 0.5% |
-| openai/REDACTED | 1,970 | 0.3% |
+| gemini/gemini-2.5-pro | 222,047 | 33.4% |
+| gpt-5 | 211,072 | 31.7% |
+| None | 168,988 | 25.4% |
+| o3-pro | 36,620 | 5.5% |
+| gemini/gemini-2.5-flash-lite | 15,470 | 2.3% |
+| gemini/gemini-2.5-flash-lite-preview-06-17 | 11,371 | 1.7% |
-
-{: .note :}
-Some models show as REDACTED, because they are new or unpopular models.
-Aider's analytics only records the names of "well known" LLMs.
## How are the "aider wrote xx% of code" stats computed?
diff --git a/aider/website/docs/languages.md b/aider/website/docs/languages.md
index d742fa93582..9bf2f20a793 100644
--- a/aider/website/docs/languages.md
+++ b/aider/website/docs/languages.md
@@ -133,7 +133,7 @@ cog.out(get_supported_languages_md())
| gstlaunch | .launch | | ✓ |
| hack | .hack | | ✓ |
| hare | .ha | | ✓ |
-| haskell | .hs | | ✓ |
+| haskell | .hs | ✓ | ✓ |
| haxe | .hx | | ✓ |
| hcl | .hcl | ✓ | ✓ |
| hcl | .tf | ✓ | ✓ |
@@ -257,7 +257,7 @@ cog.out(get_supported_languages_md())
| xml | .xml | | ✓ |
| xml | .xsl | | ✓ |
| yuck | .yuck | | ✓ |
-| zig | .zig | | ✓ |
+| zig | .zig | ✓ | ✓ |
diff --git a/aider/website/docs/leaderboards/index.md b/aider/website/docs/leaderboards/index.md
index 64463a6828e..0a4d394b040 100644
--- a/aider/website/docs/leaderboards/index.md
+++ b/aider/website/docs/leaderboards/index.md
@@ -285,6 +285,6 @@ mod_dates = [get_last_modified_date(file) for file in files]
latest_mod_date = max(mod_dates)
cog.out(f"{latest_mod_date.strftime('%B %d, %Y.')}")
]]]-->
-September 02, 2025.
+October 04, 2025.
diff --git a/aider/website/docs/more/infinite-output.md b/aider/website/docs/more/infinite-output.md
index c3bb39c51e0..6c700b45543 100644
--- a/aider/website/docs/more/infinite-output.md
+++ b/aider/website/docs/more/infinite-output.md
@@ -57,13 +57,20 @@ cog.out(model_list)
]]]-->
- anthropic.claude-3-5-haiku-20241022-v1:0
- anthropic.claude-3-5-sonnet-20241022-v2:0
+- anthropic.claude-3-7-sonnet-20240620-v1:0
- anthropic.claude-3-7-sonnet-20250219-v1:0
+- anthropic.claude-haiku-4-5-20251001-v1:0
+- anthropic.claude-haiku-4-5@20251001
- anthropic.claude-opus-4-1-20250805-v1:0
- anthropic.claude-opus-4-20250514-v1:0
- anthropic.claude-sonnet-4-20250514-v1:0
- apac.anthropic.claude-3-5-sonnet-20241022-v2:0
+- apac.anthropic.claude-haiku-4-5-20251001-v1:0
- apac.anthropic.claude-sonnet-4-20250514-v1:0
+- au.anthropic.claude-haiku-4-5-20251001-v1:0
+- au.anthropic.claude-sonnet-4-5-20250929-v1:0
- azure_ai/mistral-medium-2505
+- bedrock/us-gov-west-1/anthropic.claude-3-7-sonnet-20250219-v1:0
- bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0
- claude-3-5-haiku-20241022
- claude-3-5-haiku-latest
@@ -77,10 +84,14 @@ cog.out(model_list)
- claude-3-opus-latest
- claude-4-opus-20250514
- claude-4-sonnet-20250514
+- claude-haiku-4-5
+- claude-haiku-4-5-20251001
- claude-opus-4-1
- claude-opus-4-1-20250805
- claude-opus-4-20250514
- claude-sonnet-4-20250514
+- claude-sonnet-4-5
+- claude-sonnet-4-5-20250929
- codestral/codestral-2405
- codestral/codestral-latest
- databricks/databricks-claude-3-7-sonnet
@@ -92,9 +103,16 @@ cog.out(model_list)
- eu.anthropic.claude-3-5-haiku-20241022-v1:0
- eu.anthropic.claude-3-5-sonnet-20241022-v2:0
- eu.anthropic.claude-3-7-sonnet-20250219-v1:0
+- eu.anthropic.claude-haiku-4-5-20251001-v1:0
- eu.anthropic.claude-opus-4-1-20250805-v1:0
- eu.anthropic.claude-opus-4-20250514-v1:0
- eu.anthropic.claude-sonnet-4-20250514-v1:0
+- eu.anthropic.claude-sonnet-4-5-20250929-v1:0
+- global.anthropic.claude-haiku-4-5-20251001-v1:0
+- global.anthropic.claude-sonnet-4-20250514-v1:0
+- global.anthropic.claude-sonnet-4-5-20250929-v1:0
+- jp.anthropic.claude-haiku-4-5-20251001-v1:0
+- jp.anthropic.claude-sonnet-4-5-20250929-v1:0
- mistral/codestral-2405
- mistral/codestral-latest
- mistral/codestral-mamba-latest
@@ -102,6 +120,7 @@ cog.out(model_list)
- mistral/devstral-small-2505
- mistral/devstral-small-2507
- mistral/magistral-medium-2506
+- mistral/magistral-medium-2509
- mistral/magistral-medium-latest
- mistral/magistral-small-2506
- mistral/magistral-small-latest
@@ -127,18 +146,23 @@ cog.out(model_list)
- mistral/pixtral-large-latest
- openrouter/anthropic/claude-3.5-sonnet
- openrouter/anthropic/claude-3.7-sonnet
+- openrouter/anthropic/claude-haiku-4.5
- openrouter/anthropic/claude-opus-4
- openrouter/anthropic/claude-opus-4.1
- openrouter/anthropic/claude-sonnet-4
+- openrouter/anthropic/claude-sonnet-4.5
- openrouter/deepseek/deepseek-chat-v3.1
- openrouter/deepseek/deepseek-r1
- openrouter/deepseek/deepseek-r1-0528
+- openrouter/deepseek/deepseek-v3.2-exp
- us.anthropic.claude-3-5-haiku-20241022-v1:0
- us.anthropic.claude-3-5-sonnet-20241022-v2:0
- us.anthropic.claude-3-7-sonnet-20250219-v1:0
+- us.anthropic.claude-haiku-4-5-20251001-v1:0
- us.anthropic.claude-opus-4-1-20250805-v1:0
- us.anthropic.claude-opus-4-20250514-v1:0
- us.anthropic.claude-sonnet-4-20250514-v1:0
+- us.anthropic.claude-sonnet-4-5-20250929-v1:0
- vertex_ai/claude-3-5-haiku
- vertex_ai/claude-3-5-haiku@20241022
- vertex_ai/claude-3-5-sonnet
@@ -152,11 +176,14 @@ cog.out(model_list)
- vertex_ai/claude-3-opus@20240229
- vertex_ai/claude-3-sonnet
- vertex_ai/claude-3-sonnet@20240229
+- vertex_ai/claude-haiku-4-5@20251001
- vertex_ai/claude-opus-4
- vertex_ai/claude-opus-4-1
- vertex_ai/claude-opus-4-1@20250805
- vertex_ai/claude-opus-4@20250514
- vertex_ai/claude-sonnet-4
+- vertex_ai/claude-sonnet-4-5
+- vertex_ai/claude-sonnet-4-5@20250929
- vertex_ai/claude-sonnet-4@20250514
- vertex_ai/deepseek-ai/deepseek-r1-0528-maas
- vertex_ai/deepseek-ai/deepseek-v3.1-maas
diff --git a/aider/website/index.html b/aider/website/index.html
index 54c64d83a56..94078762bef 100644
--- a/aider/website/index.html
+++ b/aider/website/index.html
@@ -69,11 +69,11 @@ AI pair programming in your terminal
]]]-->
⭐ GitHub Stars
- 38K
+ 39K
📦 Installs
- 3.4M
+ 3.9M
📈 Tokens/week
From f626e44a0d274b9da65cc5d78f0f2409668254f0 Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Thu, 20 Nov 2025 20:34:03 -0800
Subject: [PATCH 04/10] copy
---
aider/website/docs/leaderboards/index.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aider/website/docs/leaderboards/index.md b/aider/website/docs/leaderboards/index.md
index 0a4d394b040..e3072f99e85 100644
--- a/aider/website/docs/leaderboards/index.md
+++ b/aider/website/docs/leaderboards/index.md
@@ -285,6 +285,6 @@ mod_dates = [get_last_modified_date(file) for file in files]
latest_mod_date = max(mod_dates)
cog.out(f"{latest_mod_date.strftime('%B %d, %Y.')}")
]]]-->
-October 04, 2025.
+November 20, 2025.
From c6284dc67856640b47729e9d748e09d4bf5428f8 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Thu, 20 Nov 2025 23:39:25 -0500
Subject: [PATCH 05/10] #173: json5 is unnecessary for loading model metadata;
 improve startup time
---
aider/models.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/aider/models.py b/aider/models.py
index 8f5380136ca..de6ccd0946e 100644
--- a/aider/models.py
+++ b/aider/models.py
@@ -12,7 +12,6 @@
from pathlib import Path
from typing import Optional, Union
-import json5
import yaml
from PIL import Image
@@ -1095,7 +1094,7 @@ def register_litellm_models(model_fnames):
data = Path(model_fname).read_text()
if not data.strip():
continue
- model_def = json5.loads(data)
+ model_def = json.loads(data)
if not model_def:
continue
From 5fa6d5d71922a34c49a0e7eb98d3a60ef6416b13 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Fri, 21 Nov 2025 00:20:42 -0500
Subject: [PATCH 06/10] Update model metadata with LiteLLM's defaults
---
aider/resources/model-metadata.json | 658 ++++++++++++++++++++++++++--
1 file changed, 615 insertions(+), 43 deletions(-)
diff --git a/aider/resources/model-metadata.json b/aider/resources/model-metadata.json
index 2a8c960e45a..199c02f2f52 100644
--- a/aider/resources/model-metadata.json
+++ b/aider/resources/model-metadata.json
@@ -1297,6 +1297,132 @@
"supports_tool_choice": true,
"supports_vision": true
},
+ "azure/eu/gpt-5.1": {
+ "cache_read_input_token_cost": 1.4e-07,
+ "input_cost_per_token": 1.38e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/eu/gpt-5.1-chat": {
+ "cache_read_input_token_cost": 1.4e-07,
+ "input_cost_per_token": 1.38e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/eu/gpt-5.1-codex": {
+ "cache_read_input_token_cost": 1.4e-07,
+ "input_cost_per_token": 1.38e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 1.1e-05,
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/eu/gpt-5.1-codex-mini": {
+ "cache_read_input_token_cost": 2.8e-08,
+ "input_cost_per_token": 2.75e-07,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 2.2e-06,
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
"azure/eu/gpt-5-nano-2025-08-07": {
"cache_read_input_token_cost": 5.5e-09,
"input_cost_per_token": 5.5e-08,
@@ -1471,6 +1597,132 @@
"supports_tool_choice": true,
"supports_vision": true
},
+ "azure/global/gpt-5.1": {
+ "cache_read_input_token_cost": 1.25e-07,
+ "input_cost_per_token": 1.25e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/global/gpt-5.1-chat": {
+ "cache_read_input_token_cost": 1.25e-07,
+ "input_cost_per_token": 1.25e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/global/gpt-5.1-codex": {
+ "cache_read_input_token_cost": 1.25e-07,
+ "input_cost_per_token": 1.25e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 1e-05,
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/global/gpt-5.1-codex-mini": {
+ "cache_read_input_token_cost": 2.5e-08,
+ "input_cost_per_token": 2.5e-07,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 2e-06,
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
"azure/gpt-3.5-turbo": {
"input_cost_per_token": 5e-07,
"litellm_provider": "azure",
@@ -2672,7 +2924,133 @@
"image"
],
"supported_output_modalities": [
- "text"
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5-nano": {
+ "cache_read_input_token_cost": 5e-09,
+ "input_cost_per_token": 5e-08,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 4e-07,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5-nano-2025-08-07": {
+ "cache_read_input_token_cost": 5e-09,
+ "input_cost_per_token": 5e-08,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 4e-07,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5-pro": {
+ "input_cost_per_token": 1.5e-05,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 400000,
+ "mode": "responses",
+ "output_cost_per_token": 0.00012,
+ "source": "https://learn.microsoft.com/en-us/azure/ai-foundry/foundry-models/concepts/models-sold-directly-by-azure?pivots=azure-openai&tabs=global-standard-aoai%2Cstandard-chat-completions%2Cglobal-standard#gpt-5",
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/gpt-5.1": {
+ "cache_read_input_token_cost": 1.25e-07,
+ "input_cost_per_token": 1.25e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
],
"supports_function_calling": true,
"supports_native_streaming": true,
@@ -2685,15 +3063,15 @@
"supports_tool_choice": true,
"supports_vision": true
},
- "azure/gpt-5-nano": {
- "cache_read_input_token_cost": 5e-09,
- "input_cost_per_token": 5e-08,
+ "azure/gpt-5.1-chat": {
+ "cache_read_input_token_cost": 1.25e-07,
+ "input_cost_per_token": 1.25e-06,
"litellm_provider": "azure",
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
"mode": "chat",
- "output_cost_per_token": 4e-07,
+ "output_cost_per_token": 1e-05,
"supported_endpoints": [
"/v1/chat/completions",
"/v1/batch",
@@ -2704,7 +3082,8 @@
"image"
],
"supported_output_modalities": [
- "text"
+ "text",
+ "image"
],
"supports_function_calling": true,
"supports_native_streaming": true,
@@ -2717,18 +3096,16 @@
"supports_tool_choice": true,
"supports_vision": true
},
- "azure/gpt-5-nano-2025-08-07": {
- "cache_read_input_token_cost": 5e-09,
- "input_cost_per_token": 5e-08,
+ "azure/gpt-5.1-codex": {
+ "cache_read_input_token_cost": 1.25e-07,
+ "input_cost_per_token": 1.25e-06,
"litellm_provider": "azure",
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "chat",
- "output_cost_per_token": 4e-07,
+ "mode": "responses",
+ "output_cost_per_token": 1e-05,
"supported_endpoints": [
- "/v1/chat/completions",
- "/v1/batch",
"/v1/responses"
],
"supported_modalities": [
@@ -2745,19 +3122,19 @@
"supports_prompt_caching": true,
"supports_reasoning": true,
"supports_response_schema": true,
- "supports_system_messages": true,
+ "supports_system_messages": false,
"supports_tool_choice": true,
"supports_vision": true
},
- "azure/gpt-5-pro": {
- "input_cost_per_token": 1.5e-05,
+ "azure/gpt-5.1-codex-mini": {
+ "cache_read_input_token_cost": 2.5e-08,
+ "input_cost_per_token": 2.5e-07,
"litellm_provider": "azure",
"max_input_tokens": 272000,
"max_output_tokens": 128000,
- "max_tokens": 400000,
+ "max_tokens": 128000,
"mode": "responses",
- "output_cost_per_token": 0.00012,
- "source": "https://learn.microsoft.com/en-us/azure/ai-foundry/foundry-models/concepts/models-sold-directly-by-azure?pivots=azure-openai&tabs=global-standard-aoai%2Cstandard-chat-completions%2Cglobal-standard#gpt-5",
+ "output_cost_per_token": 2e-06,
"supported_endpoints": [
"/v1/responses"
],
@@ -2769,12 +3146,13 @@
"text"
],
"supports_function_calling": true,
+ "supports_native_streaming": true,
"supports_parallel_function_calling": true,
"supports_pdf_input": true,
"supports_prompt_caching": true,
"supports_reasoning": true,
"supports_response_schema": true,
- "supports_system_messages": true,
+ "supports_system_messages": false,
"supports_tool_choice": true,
"supports_vision": true
},
@@ -3695,6 +4073,132 @@
"supports_tool_choice": true,
"supports_vision": true
},
+ "azure/us/gpt-5.1": {
+ "cache_read_input_token_cost": 1.4e-07,
+ "input_cost_per_token": 1.38e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/us/gpt-5.1-chat": {
+ "cache_read_input_token_cost": 1.4e-07,
+ "input_cost_per_token": 1.38e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "chat",
+ "output_cost_per_token": 1.1e-05,
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/batch",
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text",
+ "image"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/us/gpt-5.1-codex": {
+ "cache_read_input_token_cost": 1.4e-07,
+ "input_cost_per_token": 1.38e-06,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 1.1e-05,
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
+ "azure/us/gpt-5.1-codex-mini": {
+ "cache_read_input_token_cost": 2.8e-08,
+ "input_cost_per_token": 2.75e-07,
+ "litellm_provider": "azure",
+ "max_input_tokens": 272000,
+ "max_output_tokens": 128000,
+ "max_tokens": 128000,
+ "mode": "responses",
+ "output_cost_per_token": 2.2e-06,
+ "supported_endpoints": [
+ "/v1/responses"
+ ],
+ "supported_modalities": [
+ "text",
+ "image"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_function_calling": true,
+ "supports_native_streaming": true,
+ "supports_parallel_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": false,
+ "supports_tool_choice": true,
+ "supports_vision": true
+ },
"azure/us/o1-2024-12-17": {
"cache_read_input_token_cost": 8.25e-06,
"input_cost_per_token": 1.65e-05,
@@ -11281,10 +11785,12 @@
"supports_web_search": true
},
"gemini-3-pro-preview": {
- "cache_read_input_token_cost": 1.25e-07,
+ "cache_read_input_token_cost": 2e-07,
+ "cache_read_input_token_cost_above_200k_tokens": 4e-07,
"cache_creation_input_token_cost_above_200k_tokens": 2.5e-07,
"input_cost_per_token": 2e-06,
"input_cost_per_token_above_200k_tokens": 4e-06,
+ "input_cost_per_token_batches": 1e-06,
"litellm_provider": "vertex_ai-language-models",
"max_audio_length_hours": 8.4,
"max_audio_per_prompt": 1,
@@ -11298,10 +11804,60 @@
"mode": "chat",
"output_cost_per_token": 1.2e-05,
"output_cost_per_token_above_200k_tokens": 1.8e-05,
+ "output_cost_per_token_batches": 6e-06,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
"supported_endpoints": [
"/v1/chat/completions",
- "/v1/completions"
+ "/v1/completions",
+ "/v1/batch"
+ ],
+ "supported_modalities": [
+ "text",
+ "image",
+ "audio",
+ "video"
+ ],
+ "supported_output_modalities": [
+ "text"
+ ],
+ "supports_audio_input": true,
+ "supports_function_calling": true,
+ "supports_pdf_input": true,
+ "supports_prompt_caching": true,
+ "supports_reasoning": true,
+ "supports_response_schema": true,
+ "supports_system_messages": true,
+ "supports_tool_choice": true,
+ "supports_video_input": true,
+ "supports_vision": true,
+ "supports_web_search": true
+ },
+ "vertex_ai/gemini-3-pro-preview": {
+ "cache_read_input_token_cost": 2e-07,
+ "cache_read_input_token_cost_above_200k_tokens": 4e-07,
+ "cache_creation_input_token_cost_above_200k_tokens": 2.5e-07,
+ "input_cost_per_token": 2e-06,
+ "input_cost_per_token_above_200k_tokens": 4e-06,
+ "input_cost_per_token_batches": 1e-06,
+ "litellm_provider": "vertex_ai",
+ "max_audio_length_hours": 8.4,
+ "max_audio_per_prompt": 1,
+ "max_images_per_prompt": 3000,
+ "max_input_tokens": 1048576,
+ "max_output_tokens": 65535,
+ "max_pdf_size_mb": 30,
+ "max_tokens": 65535,
+ "max_video_length": 1,
+ "max_videos_per_prompt": 10,
+ "mode": "chat",
+ "output_cost_per_token": 1.2e-05,
+ "output_cost_per_token_above_200k_tokens": 1.8e-05,
+ "output_cost_per_token_batches": 6e-06,
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
+ "supported_endpoints": [
+ "/v1/chat/completions",
+ "/v1/completions",
+ "/v1/batch"
],
"supported_modalities": [
"text",
@@ -12984,9 +13540,11 @@
"tpm": 800000
},
"gemini/gemini-3-pro-preview": {
- "cache_read_input_token_cost": 3.125e-07,
+ "cache_read_input_token_cost": 2e-07,
+ "cache_read_input_token_cost_above_200k_tokens": 4e-07,
"input_cost_per_token": 2e-06,
"input_cost_per_token_above_200k_tokens": 4e-06,
+ "input_cost_per_token_batches": 1e-06,
"litellm_provider": "gemini",
"max_audio_length_hours": 8.4,
"max_audio_per_prompt": 1,
@@ -13000,11 +13558,13 @@
"mode": "chat",
"output_cost_per_token": 1.2e-05,
"output_cost_per_token_above_200k_tokens": 1.8e-05,
+ "output_cost_per_token_batches": 6e-06,
"rpm": 2000,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing",
"supported_endpoints": [
"/v1/chat/completions",
- "/v1/completions"
+ "/v1/completions",
+ "/v1/batch"
],
"supported_modalities": [
"text",
@@ -14635,7 +15195,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_flex": 5e-06,
"output_cost_per_token_priority": 2e-05,
@@ -14672,7 +15232,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_priority": 2e-05,
"supported_endpoints": [
@@ -14708,7 +15268,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_priority": 2e-05,
"supported_endpoints": [
@@ -14744,7 +15304,7 @@
"max_input_tokens": 128000,
"max_output_tokens": 16384,
"max_tokens": 16384,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_priority": 2e-05,
"supported_endpoints": [
@@ -14847,7 +15407,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"output_cost_per_token_flex": 5e-06,
"output_cost_per_token_priority": 2e-05,
@@ -14882,7 +15442,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"supported_endpoints": [
"/v1/chat/completions",
@@ -14914,7 +15474,7 @@
"max_input_tokens": 128000,
"max_output_tokens": 16384,
"max_tokens": 16384,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1e-05,
"supported_endpoints": [
"/v1/chat/completions",
@@ -15046,7 +15606,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 2e-06,
"output_cost_per_token_flex": 1e-06,
"output_cost_per_token_priority": 3.6e-06,
@@ -15085,7 +15645,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 2e-06,
"output_cost_per_token_flex": 1e-06,
"output_cost_per_token_priority": 3.6e-06,
@@ -15123,7 +15683,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 4e-07,
"output_cost_per_token_flex": 2e-07,
"supported_endpoints": [
@@ -15158,7 +15718,7 @@
"max_input_tokens": 272000,
"max_output_tokens": 128000,
"max_tokens": 128000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 4e-07,
"output_cost_per_token_flex": 2e-07,
"supported_endpoints": [
@@ -18011,7 +18571,7 @@
"max_input_tokens": 200000,
"max_output_tokens": 100000,
"max_tokens": 100000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 6e-05,
"supports_function_calling": true,
"supports_parallel_function_calling": true,
@@ -18030,7 +18590,7 @@
"max_input_tokens": 128000,
"max_output_tokens": 65536,
"max_tokens": 65536,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 4.4e-06,
"supports_pdf_input": true,
"supports_prompt_caching": true,
@@ -18044,7 +18604,7 @@
"max_input_tokens": 128000,
"max_output_tokens": 65536,
"max_tokens": 65536,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 1.2e-05,
"supports_pdf_input": true,
"supports_prompt_caching": true,
@@ -18058,7 +18618,7 @@
"max_input_tokens": 128000,
"max_output_tokens": 32768,
"max_tokens": 32768,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 6e-05,
"supports_pdf_input": true,
"supports_prompt_caching": true,
@@ -18072,7 +18632,7 @@
"max_input_tokens": 128000,
"max_output_tokens": 32768,
"max_tokens": 32768,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 6e-05,
"supports_pdf_input": true,
"supports_prompt_caching": true,
@@ -18188,7 +18748,7 @@
"max_input_tokens": 200000,
"max_output_tokens": 100000,
"max_tokens": 100000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 8e-06,
"supported_endpoints": [
"/v1/responses",
@@ -18286,7 +18846,7 @@
"max_input_tokens": 200000,
"max_output_tokens": 100000,
"max_tokens": 100000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 4.4e-06,
"supports_function_calling": true,
"supports_parallel_function_calling": false,
@@ -18303,7 +18863,7 @@
"max_input_tokens": 200000,
"max_output_tokens": 100000,
"max_tokens": 100000,
- "mode": "responses",
+ "mode": "chat",
"output_cost_per_token": 4.4e-06,
"supports_function_calling": true,
"supports_parallel_function_calling": false,
@@ -23942,6 +24502,12 @@
"supports_reasoning": true,
"supports_tool_choice": true
},
+ "vertex_ai/gemini-2.5-flash-image": {
+ "litellm_provider": "vertex_ai-language-models",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.039,
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/image-generation#edit-an-image"
+ },
"vertex_ai/imagegeneration@006": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
@@ -23966,6 +24532,12 @@
"output_cost_per_image": 0.04,
"source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
},
+ "vertex_ai/imagen-3.0-capability-001": {
+ "litellm_provider": "vertex_ai-image-models",
+ "mode": "image_generation",
+ "output_cost_per_image": 0.04,
+ "source": "https://cloud.google.com/vertex-ai/generative-ai/docs/image/edit-insert-objects"
+ },
"vertex_ai/imagen-4.0-fast-generate-001": {
"litellm_provider": "vertex_ai-image-models",
"mode": "image_generation",
From 3ff492699bbef67656f8d45e69cb184047b22df5 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Fri, 21 Nov 2025 00:41:05 -0500
Subject: [PATCH 07/10] Update contributor list
---
README.md | 34 +++++++++++++++++++++++++++++----
scripts/get_contributor_list.js | 13 +++++++++++++
2 files changed, 43 insertions(+), 4 deletions(-)
create mode 100644 scripts/get_contributor_list.js
diff --git a/README.md b/README.md
index 3e37fb6f672..ed6b6973bf7 100644
--- a/README.md
+++ b/README.md
@@ -100,7 +100,33 @@ Use the tool installation so aider doesn't interfere with your development envir
### All Contributors (Both Aider Main and Aider-CE)
-
-
-
-
+@paul-gauthier
+@dwash96
+@tekacs
+@ei-grad
+@joshuavial
+@chr15m
+@fry69
+@quinlanjager
+@caseymcc
+@shladnik
+@itlackey
+@tomjuggler
+@vk4s
+@titusz
+@daniel-vainsencher
+@bphd
+@akaihola
+@jalammar
+@schpet
+@iamFIREcracker
+@KennyDizi
+@ivanfioravanti
+@mdeweerd
+@fahmad91
+@itsmeknt
+@cheahjs
+@youknow04
+@pcamp
+@miradnanali
+@o-nix
\ No newline at end of file
diff --git a/scripts/get_contributor_list.js b/scripts/get_contributor_list.js
new file mode 100644
index 00000000000..23075d4e61f
--- /dev/null
+++ b/scripts/get_contributor_list.js
@@ -0,0 +1,13 @@
+(async function get_contributors(){
+ const response = await fetch("https://api.github.com/repos/dwash96/aider-ce/contributors");
+ const data = await response.json();
+ console.log(data)
+
+ let output = [];
+
+ data.forEach((item) => {
+ output.push(`@${item.login}`)
+ });
+
+ console.log(output.join("\n"))
+})()
\ No newline at end of file
From 6517c138a9267cec004a28f203489859742390d9 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Fri, 21 Nov 2025 00:43:31 -0500
Subject: [PATCH 08/10] Documentation and installation before project road map
---
README.md | 60 +++++++++++++++++++++++++++----------------------------
1 file changed, 30 insertions(+), 30 deletions(-)
diff --git a/README.md b/README.md
index ed6b6973bf7..11c23f16b0e 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,33 @@
+### Documentation and Other Notes
+* [Agent Mode](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/config/agent-mode.md)
+* [MCP Configuration](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/config/mcp.md)
+* [Session Management](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/sessions.md)
+* [Aider Original Documentation (still mostly applies)](https://aider.chat/)
+* [Discord Community](https://discord.gg/McwdCRuqkJ)
+
+### Installation Instructions
+This project can be installed using several methods:
+
+### Package Installation
+```bash
+pip install aider-ce
+```
+
+or
+
+```bash
+uv pip install aider-ce
+```
+
+The package exports an `aider-ce` command that accepts all of Aider's configuration options
+
+### Tool Installation
+```bash
+uv tool install --python python3.12 aider-ce
+```
+
+Use the tool installation so aider doesn't interfere with your development environment
+
## Project Roadmap/Goals
The current priorities are to improve core capabilities and user experience of the Aider project
@@ -35,36 +65,6 @@ The current priorities are to improve core capabilities and user experience of t
* [ ] Add a plugin-like system for allowing agent mode to use user-defined tools in simple python files
* [ ] Add a dynamic tool discovery tool to allow the system to have only the tools it needs in context
-### Documentation and Other Notes
-* [Agent Mode](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/config/agent-mode.md)
-* [MCP Configuration](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/config/mcp.md)
-* [Session Management](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/sessions.md)
-* [Aider Original Documentation (still mostly applies)](https://aider.chat/)
-* [Discord Community](https://discord.gg/McwdCRuqkJ)
-
-### Installation Instructions
-This project can be installed using several methods:
-
-### Package Installation
-```bash
-pip install aider-ce
-```
-
-or
-
-```bash
-uv pip install aider-ce
-```
-
-The package exports an `aider-ce` command that accepts all of Aider's configuration options
-
-### Tool Installation
-```bash
-uv tool install --python python3.12 aider-ce
-```
-
-Use the tool installation so aider doesn't interfere with your development environment
-
### Merged PRs
* [MCP: #3937](https://github.com/Aider-AI/aider/pull/3937)
From 6d78c8a348cfe1b549ff9d681cdfcdc0551d7713 Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Fri, 21 Nov 2025 00:46:57 -0500
Subject: [PATCH 09/10] Add CHANGELOG.md, move differences there
---
.gitignore | 1 +
CHANGELOG.md | 32 ++++++++++++++++++++++++++++++++
README.md | 34 +---------------------------------
3 files changed, 34 insertions(+), 33 deletions(-)
create mode 100644 CHANGELOG.md
diff --git a/.gitignore b/.gitignore
index b34f19f0644..2c147719a65 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,6 +18,7 @@
!/.flake8
!/.gitignore
!/.pre-commit-config.yaml
+!/CHANGELOG.md
!/CNAME
!/CONTRIBUTING.metadata
!/HISTORY.md
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000000..988b4e2c460
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,32 @@
+### Merged PRs
+
+* [MCP: #3937](https://github.com/Aider-AI/aider/pull/3937)
+ * [MCP Multi Tool Response](https://github.com/quinlanjager/aider/pull/1)
+* [Navigator Mode: #3781](https://github.com/Aider-AI/aider/pull/3781)
+ * [Navigator Mode Large File Count](https://github.com/Aider-AI/aider/commit/b88a7bda649931798209945d9687718316c7427f)
+ * [Fix navigator mode auto commit](https://github.com/dwash96/aider-ce/issues/38)
+* [Qwen 3: #4383](https://github.com/Aider-AI/aider/pull/4383)
+* [Fuzzy Search: #4366](https://github.com/Aider-AI/aider/pull/4366)
+* [Map Cache Location Config: #2911](https://github.com/Aider-AI/aider/pull/2911)
+* [Enhanced System Prompts: #3804](https://github.com/Aider-AI/aider/pull/3804)
+* [Repo Map File Name Truncation Fix: #4320](https://github.com/Aider-AI/aider/pull/4320)
+* [Read Only Stub Files For Context Window Management : #3056](https://github.com/Aider-AI/aider/pull/3056)
+
+### Other Updates
+
+* [Added Remote MCP Tool Calls With HTTP Streaming](https://github.com/Aider-AI/aider/commit/a86039f73579df7c32fee910967827c9fccdec0d)
+ * [Enforce single tool call at a time](https://github.com/Aider-AI/aider/commit/3346c3e6194096cef64b1899b017bde36a65f794)
+ * [Upgraded MCP dep to 1.12.3 for Remote MCP Tool Calls](https://github.com/dwash96/aider-ce/commit/a91ee1c03627a31093364fd2a09e654781b1b879)
+ * [Updated base Python version to 3.12 to better support navigator mode (might consider undoing this, if dependency list supports it)](https://github.com/dwash96/aider-ce/commit/9ed416d523c11362a3ba9fc4c02134e0e79d41fc)
+* [Suppress LiteLLM asyncio errors that clutter output](https://github.com/Aider-AI/aider/issues/6)
+* [Updated Docker File Build Process](https://github.com/Aider-AI/aider/commit/cbab01458d0a35c03b30ac2f6347a74fc2b9f662)
+ * [Manually install necessary ubuntu dependencies](https://github.com/dwash96/aider-ce/issues/14)
+* [.gitignore updates](https://github.com/dwash96/aider-ce/commit/7c7e803fa63d1acd860eef1423e5a03220df6017)
+* [Experimental Context Compaction For Longer Running Generation Tasks](https://github.com/Aider-AI/aider/issues/6)
+* [Edit Before Adding Files and Reflecting](https://github.com/dwash96/aider-ce/pull/22)
+* [Fix Deepseek model configurations](https://github.com/Aider-AI/aider/commit/c839a6dd8964d702172cae007375e299732d3823)
+* [Relax Version Pinning For Easier Distribution](https://github.com/dwash96/aider-ce/issues/18)
+* [Remove Confirm Responses from History](https://github.com/Aider-AI/aider/pull/3958)
+* [Benchmark Results By Language](https://github.com/dwash96/aider-ce/pull/27)
+* [Allow Benchmarks to Use Repo Map For Better Accuracy](https://github.com/dwash96/aider-ce/pull/25)
+* [Read File Globbing](https://github.com/Aider-AI/aider/pull/3395)
diff --git a/README.md b/README.md
index 11c23f16b0e..2026c3ef340 100644
--- a/README.md
+++ b/README.md
@@ -3,6 +3,7 @@
* [MCP Configuration](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/config/mcp.md)
* [Session Management](https://github.com/dwash96/aider-ce/blob/main/aider/website/docs/sessions.md)
* [Aider Original Documentation (still mostly applies)](https://aider.chat/)
+* [Changelog](https://github.com/dwash96/aider-ce/blob/main/CHANGELOG.md)
* [Discord Community](https://discord.gg/McwdCRuqkJ)
### Installation Instructions
@@ -65,39 +66,6 @@ The current priorities are to improve core capabilities and user experience of t
* [ ] Add a plugin-like system for allowing agent mode to use user-defined tools in simple python files
* [ ] Add a dynamic tool discovery tool to allow the system to have only the tools it needs in context
-### Merged PRs
-
-* [MCP: #3937](https://github.com/Aider-AI/aider/pull/3937)
- * [MCP Multi Tool Response](https://github.com/quinlanjager/aider/pull/1)
-* [Navigator Mode: #3781](https://github.com/Aider-AI/aider/pull/3781)
- * [Navigator Mode Large File Count](https://github.com/Aider-AI/aider/commit/b88a7bda649931798209945d9687718316c7427f)
- * [Fix navigator mode auto commit](https://github.com/dwash96/aider-ce/issues/38)
-* [Qwen 3: #4383](https://github.com/Aider-AI/aider/pull/4383)
-* [Fuzzy Search: #4366](https://github.com/Aider-AI/aider/pull/4366)
-* [Map Cache Location Config: #2911](https://github.com/Aider-AI/aider/pull/2911)
-* [Enhanced System Prompts: #3804](https://github.com/Aider-AI/aider/pull/3804)
-* [Repo Map File Name Truncation Fix: #4320](https://github.com/Aider-AI/aider/pull/4320)
-* [Read Only Stub Files For Context Window Management : #3056](https://github.com/Aider-AI/aider/pull/3056)
-
-### Other Updates
-
-* [Added Remote MCP Tool Calls With HTTP Streaming](https://github.com/Aider-AI/aider/commit/a86039f73579df7c32fee910967827c9fccdec0d)
- * [Enforce single tool call at a time](https://github.com/Aider-AI/aider/commit/3346c3e6194096cef64b1899b017bde36a65f794)
- * [Upgraded MCP dep to 1.12.3 for Remote MCP Tool Calls](https://github.com/dwash96/aider-ce/commit/a91ee1c03627a31093364fd2a09e654781b1b879)
- * [Updated base Python version to 3.12 to better support navigator mode (might consider undoing this, if dependency list supports it)](https://github.com/dwash96/aider-ce/commit/9ed416d523c11362a3ba9fc4c02134e0e79d41fc)
-* [Suppress LiteLLM asyncio errors that clutter output](https://github.com/Aider-AI/aider/issues/6)
-* [Updated Docker File Build Process](https://github.com/Aider-AI/aider/commit/cbab01458d0a35c03b30ac2f6347a74fc2b9f662)
- * [Manually install necessary ubuntu dependencies](https://github.com/dwash96/aider-ce/issues/14)
-* [.gitignore updates](https://github.com/dwash96/aider-ce/commit/7c7e803fa63d1acd860eef1423e5a03220df6017)
-* [Experimental Context Compaction For Longer Running Generation Tasks](https://github.com/Aider-AI/aider/issues/6)
-* [Edit Before Adding Files and Reflecting](https://github.com/dwash96/aider-ce/pull/22)
-* [Fix Deepseek model configurations](https://github.com/Aider-AI/aider/commit/c839a6dd8964d702172cae007375e299732d3823)
-* [Relax Version Pinning For Easier Distribution](https://github.com/dwash96/aider-ce/issues/18)
-* [Remove Confirm Responses from History](https://github.com/Aider-AI/aider/pull/3958)
-* [Benchmark Results By Language](https://github.com/dwash96/aider-ce/pull/27)
-* [Allow Benchmarks to Use Repo Map For Better Accuracy](https://github.com/dwash96/aider-ce/pull/25)
-* [Read File Globbing](https://github.com/Aider-AI/aider/pull/3395)
-
### All Contributors (Both Aider Main and Aider-CE)
@paul-gauthier
From f28ebca1f4640a0fa610830527caacb09c22a04c Mon Sep 17 00:00:00 2001
From: Dustin Washington
Date: Fri, 21 Nov 2025 01:25:45 -0500
Subject: [PATCH 10/10] Update for whole file update getting messed up by
attempt at progress bar
---
aider/diffs.py | 19 ++++++++++---------
1 file changed, 10 insertions(+), 9 deletions(-)
diff --git a/aider/diffs.py b/aider/diffs.py
index 46266ac6780..709810452a0 100644
--- a/aider/diffs.py
+++ b/aider/diffs.py
@@ -63,17 +63,18 @@ def diff_partial_update(lines_orig, lines_updated, final=False, fname=None):
if last_non_deleted is None:
return ""
- if num_orig_lines:
- pct = last_non_deleted * 100 / num_orig_lines
- else:
- pct = 50
- bar = create_progress_bar(pct)
- bar = f" {last_non_deleted:3d} / {num_orig_lines:3d} lines [{bar}] {pct:3.0f}%\n"
+ # if num_orig_lines:
+ # pct = last_non_deleted * 100 / num_orig_lines
+ # else:
+ # pct = 50
+ # bar = create_progress_bar(pct)
+ # bar = f" {last_non_deleted:3d} / {num_orig_lines:3d} lines [{bar}] {pct:3.0f}%\n"
lines_orig = lines_orig[:last_non_deleted]
if not final:
- lines_updated = lines_updated[:-1] + [bar]
+ # lines_updated = lines_updated[:-1] + [bar]
+ lines_updated = lines_updated[:-1]
diff = difflib.unified_diff(lines_orig, lines_updated, n=5)
@@ -88,14 +89,14 @@ def diff_partial_update(lines_orig, lines_updated, final=False, fname=None):
if backticks not in diff:
break
- show = f"{backticks}diff\n"
+ show = "diff\n"
if fname:
show += f"--- {fname} original\n"
show += f"+++ {fname} updated\n"
show += diff
- show += f"{backticks}\n\n"
+ show += "\n\n"
# print(diff)