From 13c5d2f0353cb66511ea1871ab8a60efd71b6165 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 17:06:23 +0000 Subject: [PATCH 01/11] SDK: Document Responses token streaming example - Add guide: sdk/guides/llm-responses-streaming.mdx - Link example 24_responses_streaming.py with expandable code block - Add to navigation under SDK > Guides > LLM Features Co-authored-by: openhands --- docs.json | 229 ++++++++++++++++--------- sdk/guides/llm-responses-streaming.mdx | 62 +++++++ 2 files changed, 212 insertions(+), 79 deletions(-) create mode 100644 sdk/guides/llm-responses-streaming.mdx diff --git a/docs.json b/docs.json index 00574e7a..6916f2b1 100644 --- a/docs.json +++ b/docs.json @@ -65,52 +65,52 @@ { "group": "Advanced Configuration", "pages": [ - { - "group": "LLM Configuration", - "pages": [ - "openhands/usage/llms/llms", - { - "group": "Providers", - "pages": [ - "openhands/usage/llms/openhands-llms", - "openhands/usage/llms/azure-llms", - "openhands/usage/llms/google-llms", - "openhands/usage/llms/groq", - "openhands/usage/llms/local-llms", - "openhands/usage/llms/litellm-proxy", - "openhands/usage/llms/moonshot", - "openhands/usage/llms/openai-llms", - "openhands/usage/llms/openrouter" - ] - } - ] - }, - { - "group": "Runtime Configuration", - "pages": [ - "openhands/usage/runtimes/overview", - { - "group": "Providers", - "pages": [ - "openhands/usage/runtimes/docker", - "openhands/usage/runtimes/remote", - "openhands/usage/runtimes/local", - { - "group": "Third-Party Providers", - "pages": [ - "openhands/usage/runtimes/modal", - "openhands/usage/runtimes/daytona", - "openhands/usage/runtimes/runloop", - "openhands/usage/runtimes/e2b" - ] - } - ] - } - ] - }, - "openhands/usage/advanced/configuration-options", - "openhands/usage/advanced/custom-sandbox-guide", - "openhands/usage/advanced/search-engine-setup" + { + "group": "LLM Configuration", + "pages": [ + "openhands/usage/llms/llms", + { + "group": "Providers", + "pages": [ + "openhands/usage/llms/openhands-llms", + "openhands/usage/llms/azure-llms", + "openhands/usage/llms/google-llms", + "openhands/usage/llms/groq", + "openhands/usage/llms/local-llms", + "openhands/usage/llms/litellm-proxy", + "openhands/usage/llms/moonshot", + "openhands/usage/llms/openai-llms", + "openhands/usage/llms/openrouter" + ] + } + ] + }, + { + "group": "Runtime Configuration", + "pages": [ + "openhands/usage/runtimes/overview", + { + "group": "Providers", + "pages": [ + "openhands/usage/runtimes/docker", + "openhands/usage/runtimes/remote", + "openhands/usage/runtimes/local", + { + "group": "Third-Party Providers", + "pages": [ + "openhands/usage/runtimes/modal", + "openhands/usage/runtimes/daytona", + "openhands/usage/runtimes/runloop", + "openhands/usage/runtimes/e2b" + ] + } + ] + } + ] + }, + "openhands/usage/advanced/configuration-options", + "openhands/usage/advanced/custom-sandbox-guide", + "openhands/usage/advanced/search-engine-setup" ] } ] @@ -195,7 +195,8 @@ "sdk/guides/llm-registry", "sdk/guides/llm-routing", "sdk/guides/llm-reasoning", - "sdk/guides/llm-image-input" + "sdk/guides/llm-image-input", + "sdk/guides/llm-responses-streaming" ] }, { @@ -234,8 +235,7 @@ }, { "group": "GitHub Workflows", - "pages": [ - ] + "pages": [] } ] }, @@ -261,8 +261,8 @@ ] }, { - "tab": "OpenHands (Core) API", - "openapi": "openapi/openapi.json" + "tab": "OpenHands (Core) API", + "openapi": "openapi/openapi.json" } ], "global": { @@ -290,8 +290,7 @@ "dark": "/logo/dark.png" }, "navbar": { - "links": [ - ], + "links": [], "primary": { "type": 
"github", "href": "https://github.com/All-Hands-AI/OpenHands" @@ -304,7 +303,7 @@ } }, "banner": { - "content": "šŸ“¢ **GitHub Org Rename:** All-Hands-AI to OpenHands on Monday Oct 20th at 18:00 UTC. [Migration details →](https://github.com/All-Hands-AI/OpenHands/issues/11376)", + "content": "\ud83d\udce2 **GitHub Org Rename:** All-Hands-AI to OpenHands on Monday Oct 20th at 18:00 UTC. [Migration details \u2192](https://github.com/All-Hands-AI/OpenHands/issues/11376)", "dismissible": true }, "head": [ @@ -324,29 +323,101 @@ ] }, "redirects": [ - { "source": "/modules/:slug*", "destination": "/:slug*"}, - { "source": "/usage/:slug*", "destination": "/openhands/usage/:slug*"}, - { "source": "/openhands/usage/configuration-options", "destination": "/openhands/usage/advanced/configuration-options" }, - { "source": "/openhands/usage/how-to/custom-sandbox-guide", "destination": "/openhands/usage/advanced/custom-sandbox-guide" }, - { "source": "/openhands/usage/search-engine-setup", "destination": "/openhands/usage/advanced/search-engine-setup" }, - { "source": "/openhands/usage/prompting/repository", "destination": "/openhands/usage/customization/repository" }, - { "source": "/openhands/usage/how-to/debugging", "destination": "/openhands/usage/developers/debugging" }, - { "source": "/openhands/usage/how-to/development-overview", "destination": "/openhands/usage/developers/development-overview" }, - { "source": "/openhands/usage/how-to/evaluation-harness", "destination": "/openhands/usage/developers/evaluation-harness" }, - { "source": "/openhands/usage/how-to/websocket-connection", "destination": "/openhands/usage/developers/websocket-connection" }, - { "source": "/openhands/usage/prompting/microagents-keyword", "destination": "/openhands/usage/microagents/microagents-keyword" }, - { "source": "/openhands/usage/prompting/microagents-org", "destination": "/openhands/usage/microagents/microagents-org" }, - { "source": "/openhands/usage/prompting/microagents-overview", "destination": "/openhands/usage/microagents/microagents-overview" }, - { "source": "/openhands/usage/prompting/microagents-public", "destination": "/openhands/usage/microagents/microagents-public" }, - { "source": "/openhands/usage/prompting/microagents-repo", "destination": "/openhands/usage/microagents/microagents-repo" }, - { "source": "/openhands/usage/installation", "destination": "/openhands/usage/quick-start" }, - { "source": "/openhands/usage/how-to/cli-mode", "destination": "/openhands/usage/run-openhands/cli-mode" }, - { "source": "/openhands/usage/how-to/github-action", "destination": "/openhands/usage/run-openhands/github-action" }, - { "source": "/openhands/usage/how-to/gui-mode", "destination": "/openhands/usage/run-openhands/gui-mode" }, - { "source": "/openhands/usage/how-to/headless-mode", "destination": "/openhands/usage/run-openhands/headless-mode" }, - { "source": "/openhands/usage/local-setup", "destination": "/openhands/usage/run-openhands/local-setup" }, - { "source": "/openhands/usage/getting-started", "destination": "/openhands/usage/start-building" }, - { "source": "/openhands/usage/prompting/prompting-best-practices", "destination": "/openhands/usage/tips/prompting-best-practices" }, - { "source": "/openhands/usage/feedback", "destination": "/openhands/usage/troubleshooting/feedback" } + { + "source": "/modules/:slug*", + "destination": "/:slug*" + }, + { + "source": "/usage/:slug*", + "destination": "/openhands/usage/:slug*" + }, + { + "source": "/openhands/usage/configuration-options", + "destination": 
"/openhands/usage/advanced/configuration-options" + }, + { + "source": "/openhands/usage/how-to/custom-sandbox-guide", + "destination": "/openhands/usage/advanced/custom-sandbox-guide" + }, + { + "source": "/openhands/usage/search-engine-setup", + "destination": "/openhands/usage/advanced/search-engine-setup" + }, + { + "source": "/openhands/usage/prompting/repository", + "destination": "/openhands/usage/customization/repository" + }, + { + "source": "/openhands/usage/how-to/debugging", + "destination": "/openhands/usage/developers/debugging" + }, + { + "source": "/openhands/usage/how-to/development-overview", + "destination": "/openhands/usage/developers/development-overview" + }, + { + "source": "/openhands/usage/how-to/evaluation-harness", + "destination": "/openhands/usage/developers/evaluation-harness" + }, + { + "source": "/openhands/usage/how-to/websocket-connection", + "destination": "/openhands/usage/developers/websocket-connection" + }, + { + "source": "/openhands/usage/prompting/microagents-keyword", + "destination": "/openhands/usage/microagents/microagents-keyword" + }, + { + "source": "/openhands/usage/prompting/microagents-org", + "destination": "/openhands/usage/microagents/microagents-org" + }, + { + "source": "/openhands/usage/prompting/microagents-overview", + "destination": "/openhands/usage/microagents/microagents-overview" + }, + { + "source": "/openhands/usage/prompting/microagents-public", + "destination": "/openhands/usage/microagents/microagents-public" + }, + { + "source": "/openhands/usage/prompting/microagents-repo", + "destination": "/openhands/usage/microagents/microagents-repo" + }, + { + "source": "/openhands/usage/installation", + "destination": "/openhands/usage/quick-start" + }, + { + "source": "/openhands/usage/how-to/cli-mode", + "destination": "/openhands/usage/run-openhands/cli-mode" + }, + { + "source": "/openhands/usage/how-to/github-action", + "destination": "/openhands/usage/run-openhands/github-action" + }, + { + "source": "/openhands/usage/how-to/gui-mode", + "destination": "/openhands/usage/run-openhands/gui-mode" + }, + { + "source": "/openhands/usage/how-to/headless-mode", + "destination": "/openhands/usage/run-openhands/headless-mode" + }, + { + "source": "/openhands/usage/local-setup", + "destination": "/openhands/usage/run-openhands/local-setup" + }, + { + "source": "/openhands/usage/getting-started", + "destination": "/openhands/usage/start-building" + }, + { + "source": "/openhands/usage/prompting/prompting-best-practices", + "destination": "/openhands/usage/tips/prompting-best-practices" + }, + { + "source": "/openhands/usage/feedback", + "destination": "/openhands/usage/troubleshooting/feedback" + } ] -} +} \ No newline at end of file diff --git a/sdk/guides/llm-responses-streaming.mdx b/sdk/guides/llm-responses-streaming.mdx new file mode 100644 index 00000000..c4ca998c --- /dev/null +++ b/sdk/guides/llm-responses-streaming.mdx @@ -0,0 +1,62 @@ +--- +title: Responses Streaming +description: Stream token deltas from the OpenAI Responses API path via LiteLLM. +--- + + +This example is available on GitHub: [examples/01_standalone_sdk/24_responses_streaming.py](https://github.com/All-Hands-AI/agent-sdk/blob/main/examples/01_standalone_sdk/24_responses_streaming.py) + + +Enable live token streaming when using the OpenAI Responses API path. 
This guide shows how to: + +- Subscribe to streaming deltas from the model +- Log streamed chunks to a JSONL file +- Optionally render streaming visually or print deltas to stdout + +```python icon="python" expandable examples/01_standalone_sdk/24_responses_streaming.py +``` + +```bash Running the Example +export LLM_API_KEY="your-openai-compatible-api-key" +# Optional overrides +# export LLM_MODEL="openhands/gpt-5-codex" +# export LLM_BASE_URL="https://your-litellm-or-provider-base-url" + +cd agent-sdk +uv run python examples/01_standalone_sdk/24_responses_streaming.py +``` + +### How It Works + +- Pass a token callback to Conversation to receive streaming chunks as they arrive: + +```python +conversation = Conversation( + agent=agent, + workspace=os.getcwd(), + token_callbacks=[on_token], +) +``` + +- Each chunk contains a delta: `text_delta` for content tokens or `arguments_delta` for tool-call arguments. The example logs a serialized record per chunk to `./logs/stream/*.jsonl`. + +- For a visual live view, use the built-in streaming visualizer: + +```python +from openhands.sdk.conversation.streaming_visualizer import create_streaming_visualizer + +visualizer = create_streaming_visualizer() +conversation = Conversation( + agent=agent, + workspace=os.getcwd(), + token_callbacks=[on_token], + callbacks=[visualizer.on_event], + visualize=False, +) +``` + +## Next Steps + +- **[Reasoning (Responses API)](/sdk/guides/llm-reasoning)** – Access model reasoning traces +- **[LLM Routing](/sdk/guides/llm-routing)** – Route requests to different models +- **[Image Input](/sdk/guides/llm-image-input)** – Send images to multimodal models From 821d027ea7ee3403c8facdf90d2199e79b58c0c1 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 23 Oct 2025 17:10:42 +0000 Subject: [PATCH 02/11] docs: sync code blocks from agent-sdk examples Synced from agent-sdk ref: main --- sdk/guides/agent-server/api-sandbox.mdx | 10 ++++----- sdk/guides/agent-server/docker-sandbox.mdx | 26 +++++++++++++++------- sdk/guides/agent-server/local-server.mdx | 4 ++-- sdk/guides/custom-tools.mdx | 2 +- sdk/guides/llm-routing.mdx | 2 +- sdk/guides/metrics.mdx | 2 +- 6 files changed, 28 insertions(+), 18 deletions(-) diff --git a/sdk/guides/agent-server/api-sandbox.mdx b/sdk/guides/agent-server/api-sandbox.mdx index 438391a6..c58b0c1b 100644 --- a/sdk/guides/agent-server/api-sandbox.mdx +++ b/sdk/guides/agent-server/api-sandbox.mdx @@ -23,7 +23,7 @@ Usage: uv run examples/24_remote_convo_with_api_sandboxed_server.py Requirements: - - LITELLM_API_KEY: API key for LLM access + - LLM_API_KEY: API key for LLM access - RUNTIME_API_KEY: API key for runtime API access """ @@ -45,13 +45,13 @@ from openhands.workspace import APIRemoteWorkspace logger = get_logger(__name__) -api_key = os.getenv("LITELLM_API_KEY") -assert api_key, "LITELLM_API_KEY required" +api_key = os.getenv("LLM_API_KEY") +assert api_key, "LLM_API_KEY required" llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) @@ -62,7 +62,7 @@ if not runtime_api_key: with APIRemoteWorkspace( - runtime_api_url="https://runtime.eval.all-hands.dev", + runtime_api_url=os.getenv("RUNTIME_API_URL", "https://runtime.eval.all-hands.dev"), runtime_api_key=runtime_api_key, server_image="ghcr.io/openhands/agent-server:main-python", ) as workspace: diff --git a/sdk/guides/agent-server/docker-sandbox.mdx 
b/sdk/guides/agent-server/docker-sandbox.mdx index e07b44da..6d76bde0 100644 --- a/sdk/guides/agent-server/docker-sandbox.mdx +++ b/sdk/guides/agent-server/docker-sandbox.mdx @@ -44,7 +44,7 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) @@ -258,19 +258,30 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) # Create a Docker-based remote workspace with extra ports for VSCode access + + +def detect_platform(): + """Detects the correct Docker platform string.""" + import platform + + machine = platform.machine().lower() + if "arm" in machine or "aarch64" in machine: + return "linux/arm64" + return "linux/amd64" + + with DockerWorkspace( base_image="nikolaik/python-nodejs:python3.12-nodejs22", host_port=18010, - # TODO: Change this to your platform if not linux/arm64 - platform="linux/arm64", + platform=detect_platform(), extra_ports=True, # Expose extra ports for VSCode and VNC ) as workspace: - """Extra ports allows you to access VSCode at localhost:8011""" + """Extra ports allows you to access VSCode at localhost:18011""" # Create agent agent = get_default_agent( @@ -441,7 +452,7 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) @@ -458,7 +469,6 @@ def detect_platform(): with DockerWorkspace( base_image="nikolaik/python-nodejs:python3.12-nodejs22", host_port=8010, - # TODO: Change this to your platform if not linux/arm64 platform=detect_platform(), extra_ports=True, # Expose extra ports for VSCode and VNC ) as workspace: @@ -492,7 +502,7 @@ with DockerWorkspace( logger.info(f"\nšŸ“‹ Conversation ID: {conversation.state.id}") logger.info("šŸ“ Sending first message...") conversation.send_message( - "Could you go to https://all-hands.dev/ blog page and summarize main " + "Could you go to https://openhands.dev/ blog page and summarize main " "points of the latest blog?" ) conversation.run() diff --git a/sdk/guides/agent-server/local-server.mdx b/sdk/guides/agent-server/local-server.mdx index 0c0b3b1c..3aa92104 100644 --- a/sdk/guides/agent-server/local-server.mdx +++ b/sdk/guides/agent-server/local-server.mdx @@ -139,13 +139,13 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." 
llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) title_gen_llm = LLM( usage_id="title-gen-llm", model="litellm_proxy/openai/gpt-5-mini", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) diff --git a/sdk/guides/custom-tools.mdx b/sdk/guides/custom-tools.mdx index 8426c10b..12a33b46 100644 --- a/sdk/guides/custom-tools.mdx +++ b/sdk/guides/custom-tools.mdx @@ -115,7 +115,7 @@ class GrepExecutor(ToolExecutor[GrepAction, GrepObservation]): def __init__(self, bash: BashExecutor): self.bash: BashExecutor = bash - def __call__(self, action: GrepAction) -> GrepObservation: + def __call__(self, action: GrepAction, conversation=None) -> GrepObservation: # noqa: ARG002 root = os.path.abspath(action.path) pat = shlex.quote(action.pattern) root_q = shlex.quote(root) diff --git a/sdk/guides/llm-routing.mdx b/sdk/guides/llm-routing.mdx index b76c392f..0766af67 100644 --- a/sdk/guides/llm-routing.mdx +++ b/sdk/guides/llm-routing.mdx @@ -48,7 +48,7 @@ primary_llm = LLM( secondary_llm = LLM( usage_id="agent-secondary", model="litellm_proxy/mistral/devstral-small-2507", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=base_url, api_key=SecretStr(api_key), ) multimodal_router = MultimodalRouter( diff --git a/sdk/guides/metrics.mdx b/sdk/guides/metrics.mdx index e8b73516..a960431a 100644 --- a/sdk/guides/metrics.mdx +++ b/sdk/guides/metrics.mdx @@ -332,7 +332,7 @@ conversation.run() second_llm = LLM( usage_id="demo-secondary", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) conversation.llm_registry.add(second_llm) From 229095cb4fe3831841fc4680196764c4ed3e4146 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:56:49 +0200 Subject: [PATCH 03/11] SDK: Add Responses streaming guide to nav only (revert unintended docs.json churn) Co-authored-by: openhands From b1fd2b49e989660a92c1dcb644111840c36b0ff0 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:41 +0200 Subject: [PATCH 04/11] SDK: Minimize docs.json diff to only add Responses streaming nav entry Co-authored-by: openhands --- docs.json | 224 +++++++++++++++++++----------------------------------- 1 file changed, 77 insertions(+), 147 deletions(-) diff --git a/docs.json b/docs.json index 6916f2b1..7f194fc8 100644 --- a/docs.json +++ b/docs.json @@ -65,52 +65,52 @@ { "group": "Advanced Configuration", "pages": [ - { - "group": "LLM Configuration", - "pages": [ - "openhands/usage/llms/llms", - { - "group": "Providers", - "pages": [ - "openhands/usage/llms/openhands-llms", - "openhands/usage/llms/azure-llms", - "openhands/usage/llms/google-llms", - "openhands/usage/llms/groq", - "openhands/usage/llms/local-llms", - "openhands/usage/llms/litellm-proxy", - "openhands/usage/llms/moonshot", - "openhands/usage/llms/openai-llms", - "openhands/usage/llms/openrouter" - ] - } - ] - }, - { - "group": "Runtime Configuration", - "pages": [ - "openhands/usage/runtimes/overview", - { - "group": "Providers", - "pages": [ - "openhands/usage/runtimes/docker", - "openhands/usage/runtimes/remote", - "openhands/usage/runtimes/local", - { - "group": "Third-Party Providers", - "pages": [ - "openhands/usage/runtimes/modal", - "openhands/usage/runtimes/daytona", - 
"openhands/usage/runtimes/runloop", - "openhands/usage/runtimes/e2b" - ] - } - ] - } - ] - }, - "openhands/usage/advanced/configuration-options", - "openhands/usage/advanced/custom-sandbox-guide", - "openhands/usage/advanced/search-engine-setup" + { + "group": "LLM Configuration", + "pages": [ + "openhands/usage/llms/llms", + { + "group": "Providers", + "pages": [ + "openhands/usage/llms/openhands-llms", + "openhands/usage/llms/azure-llms", + "openhands/usage/llms/google-llms", + "openhands/usage/llms/groq", + "openhands/usage/llms/local-llms", + "openhands/usage/llms/litellm-proxy", + "openhands/usage/llms/moonshot", + "openhands/usage/llms/openai-llms", + "openhands/usage/llms/openrouter" + ] + } + ] + }, + { + "group": "Runtime Configuration", + "pages": [ + "openhands/usage/runtimes/overview", + { + "group": "Providers", + "pages": [ + "openhands/usage/runtimes/docker", + "openhands/usage/runtimes/remote", + "openhands/usage/runtimes/local", + { + "group": "Third-Party Providers", + "pages": [ + "openhands/usage/runtimes/modal", + "openhands/usage/runtimes/daytona", + "openhands/usage/runtimes/runloop", + "openhands/usage/runtimes/e2b" + ] + } + ] + } + ] + }, + "openhands/usage/advanced/configuration-options", + "openhands/usage/advanced/custom-sandbox-guide", + "openhands/usage/advanced/search-engine-setup" ] } ] @@ -235,7 +235,8 @@ }, { "group": "GitHub Workflows", - "pages": [] + "pages": [ + ] } ] }, @@ -261,8 +262,8 @@ ] }, { - "tab": "OpenHands (Core) API", - "openapi": "openapi/openapi.json" + "tab": "OpenHands (Core) API", + "openapi": "openapi/openapi.json" } ], "global": { @@ -290,7 +291,8 @@ "dark": "/logo/dark.png" }, "navbar": { - "links": [], + "links": [ + ], "primary": { "type": "github", "href": "https://github.com/All-Hands-AI/OpenHands" @@ -303,7 +305,7 @@ } }, "banner": { - "content": "\ud83d\udce2 **GitHub Org Rename:** All-Hands-AI to OpenHands on Monday Oct 20th at 18:00 UTC. [Migration details \u2192](https://github.com/All-Hands-AI/OpenHands/issues/11376)", + "content": "šŸ“¢ **GitHub Org Rename:** All-Hands-AI to OpenHands on Monday Oct 20th at 18:00 UTC. 
[Migration details →](https://github.com/All-Hands-AI/OpenHands/issues/11376)", "dismissible": true }, "head": [ @@ -323,101 +325,29 @@ ] }, "redirects": [ - { - "source": "/modules/:slug*", - "destination": "/:slug*" - }, - { - "source": "/usage/:slug*", - "destination": "/openhands/usage/:slug*" - }, - { - "source": "/openhands/usage/configuration-options", - "destination": "/openhands/usage/advanced/configuration-options" - }, - { - "source": "/openhands/usage/how-to/custom-sandbox-guide", - "destination": "/openhands/usage/advanced/custom-sandbox-guide" - }, - { - "source": "/openhands/usage/search-engine-setup", - "destination": "/openhands/usage/advanced/search-engine-setup" - }, - { - "source": "/openhands/usage/prompting/repository", - "destination": "/openhands/usage/customization/repository" - }, - { - "source": "/openhands/usage/how-to/debugging", - "destination": "/openhands/usage/developers/debugging" - }, - { - "source": "/openhands/usage/how-to/development-overview", - "destination": "/openhands/usage/developers/development-overview" - }, - { - "source": "/openhands/usage/how-to/evaluation-harness", - "destination": "/openhands/usage/developers/evaluation-harness" - }, - { - "source": "/openhands/usage/how-to/websocket-connection", - "destination": "/openhands/usage/developers/websocket-connection" - }, - { - "source": "/openhands/usage/prompting/microagents-keyword", - "destination": "/openhands/usage/microagents/microagents-keyword" - }, - { - "source": "/openhands/usage/prompting/microagents-org", - "destination": "/openhands/usage/microagents/microagents-org" - }, - { - "source": "/openhands/usage/prompting/microagents-overview", - "destination": "/openhands/usage/microagents/microagents-overview" - }, - { - "source": "/openhands/usage/prompting/microagents-public", - "destination": "/openhands/usage/microagents/microagents-public" - }, - { - "source": "/openhands/usage/prompting/microagents-repo", - "destination": "/openhands/usage/microagents/microagents-repo" - }, - { - "source": "/openhands/usage/installation", - "destination": "/openhands/usage/quick-start" - }, - { - "source": "/openhands/usage/how-to/cli-mode", - "destination": "/openhands/usage/run-openhands/cli-mode" - }, - { - "source": "/openhands/usage/how-to/github-action", - "destination": "/openhands/usage/run-openhands/github-action" - }, - { - "source": "/openhands/usage/how-to/gui-mode", - "destination": "/openhands/usage/run-openhands/gui-mode" - }, - { - "source": "/openhands/usage/how-to/headless-mode", - "destination": "/openhands/usage/run-openhands/headless-mode" - }, - { - "source": "/openhands/usage/local-setup", - "destination": "/openhands/usage/run-openhands/local-setup" - }, - { - "source": "/openhands/usage/getting-started", - "destination": "/openhands/usage/start-building" - }, - { - "source": "/openhands/usage/prompting/prompting-best-practices", - "destination": "/openhands/usage/tips/prompting-best-practices" - }, - { - "source": "/openhands/usage/feedback", - "destination": "/openhands/usage/troubleshooting/feedback" - } + { "source": "/modules/:slug*", "destination": "/:slug*"}, + { "source": "/usage/:slug*", "destination": "/openhands/usage/:slug*"}, + { "source": "/openhands/usage/configuration-options", "destination": "/openhands/usage/advanced/configuration-options" }, + { "source": "/openhands/usage/how-to/custom-sandbox-guide", "destination": "/openhands/usage/advanced/custom-sandbox-guide" }, + { "source": "/openhands/usage/search-engine-setup", "destination": 
"/openhands/usage/advanced/search-engine-setup" }, + { "source": "/openhands/usage/prompting/repository", "destination": "/openhands/usage/customization/repository" }, + { "source": "/openhands/usage/how-to/debugging", "destination": "/openhands/usage/developers/debugging" }, + { "source": "/openhands/usage/how-to/development-overview", "destination": "/openhands/usage/developers/development-overview" }, + { "source": "/openhands/usage/how-to/evaluation-harness", "destination": "/openhands/usage/developers/evaluation-harness" }, + { "source": "/openhands/usage/how-to/websocket-connection", "destination": "/openhands/usage/developers/websocket-connection" }, + { "source": "/openhands/usage/prompting/microagents-keyword", "destination": "/openhands/usage/microagents/microagents-keyword" }, + { "source": "/openhands/usage/prompting/microagents-org", "destination": "/openhands/usage/microagents/microagents-org" }, + { "source": "/openhands/usage/prompting/microagents-overview", "destination": "/openhands/usage/microagents/microagents-overview" }, + { "source": "/openhands/usage/prompting/microagents-public", "destination": "/openhands/usage/microagents/microagents-public" }, + { "source": "/openhands/usage/prompting/microagents-repo", "destination": "/openhands/usage/microagents/microagents-repo" }, + { "source": "/openhands/usage/installation", "destination": "/openhands/usage/quick-start" }, + { "source": "/openhands/usage/how-to/cli-mode", "destination": "/openhands/usage/run-openhands/cli-mode" }, + { "source": "/openhands/usage/how-to/github-action", "destination": "/openhands/usage/run-openhands/github-action" }, + { "source": "/openhands/usage/how-to/gui-mode", "destination": "/openhands/usage/run-openhands/gui-mode" }, + { "source": "/openhands/usage/how-to/headless-mode", "destination": "/openhands/usage/run-openhands/headless-mode" }, + { "source": "/openhands/usage/local-setup", "destination": "/openhands/usage/run-openhands/local-setup" }, + { "source": "/openhands/usage/getting-started", "destination": "/openhands/usage/start-building" }, + { "source": "/openhands/usage/prompting/prompting-best-practices", "destination": "/openhands/usage/tips/prompting-best-practices" }, + { "source": "/openhands/usage/feedback", "destination": "/openhands/usage/troubleshooting/feedback" } ] } \ No newline at end of file From d5e2d38b676c72248acd02af4c72d1833e028803 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:42 +0200 Subject: [PATCH 05/11] Revert unintended changes in sdk/guides/agent-server/api-sandbox.mdx to match main Co-authored-by: openhands --- sdk/guides/agent-server/api-sandbox.mdx | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/guides/agent-server/api-sandbox.mdx b/sdk/guides/agent-server/api-sandbox.mdx index c58b0c1b..438391a6 100644 --- a/sdk/guides/agent-server/api-sandbox.mdx +++ b/sdk/guides/agent-server/api-sandbox.mdx @@ -23,7 +23,7 @@ Usage: uv run examples/24_remote_convo_with_api_sandboxed_server.py Requirements: - - LLM_API_KEY: API key for LLM access + - LITELLM_API_KEY: API key for LLM access - RUNTIME_API_KEY: API key for runtime API access """ @@ -45,13 +45,13 @@ from openhands.workspace import APIRemoteWorkspace logger = get_logger(__name__) -api_key = os.getenv("LLM_API_KEY") -assert api_key, "LLM_API_KEY required" +api_key = os.getenv("LITELLM_API_KEY") +assert api_key, "LITELLM_API_KEY required" llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - 
base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) @@ -62,7 +62,7 @@ if not runtime_api_key: with APIRemoteWorkspace( - runtime_api_url=os.getenv("RUNTIME_API_URL", "https://runtime.eval.all-hands.dev"), + runtime_api_url="https://runtime.eval.all-hands.dev", runtime_api_key=runtime_api_key, server_image="ghcr.io/openhands/agent-server:main-python", ) as workspace: From 1592ebec980c088b4bab616deb9877ff5ee1c047 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:43 +0200 Subject: [PATCH 06/11] Revert unintended changes in sdk/guides/agent-server/docker-sandbox.mdx to match main Co-authored-by: openhands --- sdk/guides/agent-server/docker-sandbox.mdx | 26 +++++++--------------- 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/sdk/guides/agent-server/docker-sandbox.mdx b/sdk/guides/agent-server/docker-sandbox.mdx index 6d76bde0..e07b44da 100644 --- a/sdk/guides/agent-server/docker-sandbox.mdx +++ b/sdk/guides/agent-server/docker-sandbox.mdx @@ -44,7 +44,7 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) @@ -258,30 +258,19 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) # Create a Docker-based remote workspace with extra ports for VSCode access - - -def detect_platform(): - """Detects the correct Docker platform string.""" - import platform - - machine = platform.machine().lower() - if "arm" in machine or "aarch64" in machine: - return "linux/arm64" - return "linux/amd64" - - with DockerWorkspace( base_image="nikolaik/python-nodejs:python3.12-nodejs22", host_port=18010, - platform=detect_platform(), + # TODO: Change this to your platform if not linux/arm64 + platform="linux/arm64", extra_ports=True, # Expose extra ports for VSCode and VNC ) as workspace: - """Extra ports allows you to access VSCode at localhost:18011""" + """Extra ports allows you to access VSCode at localhost:8011""" # Create agent agent = get_default_agent( @@ -452,7 +441,7 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) @@ -469,6 +458,7 @@ def detect_platform(): with DockerWorkspace( base_image="nikolaik/python-nodejs:python3.12-nodejs22", host_port=8010, + # TODO: Change this to your platform if not linux/arm64 platform=detect_platform(), extra_ports=True, # Expose extra ports for VSCode and VNC ) as workspace: @@ -502,7 +492,7 @@ with DockerWorkspace( logger.info(f"\nšŸ“‹ Conversation ID: {conversation.state.id}") logger.info("šŸ“ Sending first message...") conversation.send_message( - "Could you go to https://openhands.dev/ blog page and summarize main " + "Could you go to https://all-hands.dev/ blog page and summarize main " "points of the latest blog?" 
) conversation.run() From 805c1e31a799c7aceeec8c2412ae49ceaf4f4765 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:44 +0200 Subject: [PATCH 07/11] Revert unintended changes in sdk/guides/agent-server/local-server.mdx to match main Co-authored-by: openhands --- sdk/guides/agent-server/local-server.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/sdk/guides/agent-server/local-server.mdx b/sdk/guides/agent-server/local-server.mdx index 3aa92104..0c0b3b1c 100644 --- a/sdk/guides/agent-server/local-server.mdx +++ b/sdk/guides/agent-server/local-server.mdx @@ -139,13 +139,13 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) title_gen_llm = LLM( usage_id="title-gen-llm", model="litellm_proxy/openai/gpt-5-mini", - base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) From 17e800c6051db3a8fdc69c6b5a88b68010c909ae Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:45 +0200 Subject: [PATCH 08/11] Revert unintended changes in sdk/guides/custom-tools.mdx to match main Co-authored-by: openhands --- sdk/guides/custom-tools.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/guides/custom-tools.mdx b/sdk/guides/custom-tools.mdx index 12a33b46..8426c10b 100644 --- a/sdk/guides/custom-tools.mdx +++ b/sdk/guides/custom-tools.mdx @@ -115,7 +115,7 @@ class GrepExecutor(ToolExecutor[GrepAction, GrepObservation]): def __init__(self, bash: BashExecutor): self.bash: BashExecutor = bash - def __call__(self, action: GrepAction, conversation=None) -> GrepObservation: # noqa: ARG002 + def __call__(self, action: GrepAction) -> GrepObservation: root = os.path.abspath(action.path) pat = shlex.quote(action.pattern) root_q = shlex.quote(root) From 3d29b3aedc2759aaaebc67a53afc2e2ad8943ab5 Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:46 +0200 Subject: [PATCH 09/11] Revert unintended changes in sdk/guides/llm-routing.mdx to match main Co-authored-by: openhands --- sdk/guides/llm-routing.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/guides/llm-routing.mdx b/sdk/guides/llm-routing.mdx index 0766af67..b76c392f 100644 --- a/sdk/guides/llm-routing.mdx +++ b/sdk/guides/llm-routing.mdx @@ -48,7 +48,7 @@ primary_llm = LLM( secondary_llm = LLM( usage_id="agent-secondary", model="litellm_proxy/mistral/devstral-small-2507", - base_url=base_url, + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) multimodal_router = MultimodalRouter( From 920eed03f350015ea0e39661978f91baee81d10a Mon Sep 17 00:00:00 2001 From: openhands Date: Thu, 23 Oct 2025 19:58:46 +0200 Subject: [PATCH 10/11] Revert unintended changes in sdk/guides/metrics.mdx to match main Co-authored-by: openhands --- sdk/guides/metrics.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/guides/metrics.mdx b/sdk/guides/metrics.mdx index a960431a..e8b73516 100644 --- a/sdk/guides/metrics.mdx +++ b/sdk/guides/metrics.mdx @@ -332,7 +332,7 @@ conversation.run() second_llm = LLM( usage_id="demo-secondary", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url=os.getenv("LLM_BASE_URL"), + base_url="https://llm-proxy.eval.all-hands.dev", api_key=SecretStr(api_key), ) 
conversation.llm_registry.add(second_llm) From 7ed79751b65a5d703a2bc1bb58c2998c40708808 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Thu, 23 Oct 2025 17:59:08 +0000 Subject: [PATCH 11/11] docs: sync code blocks from agent-sdk examples Synced from agent-sdk ref: main --- sdk/guides/agent-server/api-sandbox.mdx | 10 ++++----- sdk/guides/agent-server/docker-sandbox.mdx | 26 +++++++++++++++------- sdk/guides/agent-server/local-server.mdx | 4 ++-- sdk/guides/custom-tools.mdx | 2 +- sdk/guides/llm-routing.mdx | 2 +- sdk/guides/metrics.mdx | 2 +- 6 files changed, 28 insertions(+), 18 deletions(-) diff --git a/sdk/guides/agent-server/api-sandbox.mdx b/sdk/guides/agent-server/api-sandbox.mdx index 438391a6..c58b0c1b 100644 --- a/sdk/guides/agent-server/api-sandbox.mdx +++ b/sdk/guides/agent-server/api-sandbox.mdx @@ -23,7 +23,7 @@ Usage: uv run examples/24_remote_convo_with_api_sandboxed_server.py Requirements: - - LITELLM_API_KEY: API key for LLM access + - LLM_API_KEY: API key for LLM access - RUNTIME_API_KEY: API key for runtime API access """ @@ -45,13 +45,13 @@ from openhands.workspace import APIRemoteWorkspace logger = get_logger(__name__) -api_key = os.getenv("LITELLM_API_KEY") -assert api_key, "LITELLM_API_KEY required" +api_key = os.getenv("LLM_API_KEY") +assert api_key, "LLM_API_KEY required" llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) @@ -62,7 +62,7 @@ if not runtime_api_key: with APIRemoteWorkspace( - runtime_api_url="https://runtime.eval.all-hands.dev", + runtime_api_url=os.getenv("RUNTIME_API_URL", "https://runtime.eval.all-hands.dev"), runtime_api_key=runtime_api_key, server_image="ghcr.io/openhands/agent-server:main-python", ) as workspace: diff --git a/sdk/guides/agent-server/docker-sandbox.mdx b/sdk/guides/agent-server/docker-sandbox.mdx index e07b44da..6d76bde0 100644 --- a/sdk/guides/agent-server/docker-sandbox.mdx +++ b/sdk/guides/agent-server/docker-sandbox.mdx @@ -44,7 +44,7 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) @@ -258,19 +258,30 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." 
llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) # Create a Docker-based remote workspace with extra ports for VSCode access + + +def detect_platform(): + """Detects the correct Docker platform string.""" + import platform + + machine = platform.machine().lower() + if "arm" in machine or "aarch64" in machine: + return "linux/arm64" + return "linux/amd64" + + with DockerWorkspace( base_image="nikolaik/python-nodejs:python3.12-nodejs22", host_port=18010, - # TODO: Change this to your platform if not linux/arm64 - platform="linux/arm64", + platform=detect_platform(), extra_ports=True, # Expose extra ports for VSCode and VNC ) as workspace: - """Extra ports allows you to access VSCode at localhost:8011""" + """Extra ports allows you to access VSCode at localhost:18011""" # Create agent agent = get_default_agent( @@ -441,7 +452,7 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) @@ -458,7 +469,6 @@ def detect_platform(): with DockerWorkspace( base_image="nikolaik/python-nodejs:python3.12-nodejs22", host_port=8010, - # TODO: Change this to your platform if not linux/arm64 platform=detect_platform(), extra_ports=True, # Expose extra ports for VSCode and VNC ) as workspace: @@ -492,7 +502,7 @@ with DockerWorkspace( logger.info(f"\nšŸ“‹ Conversation ID: {conversation.state.id}") logger.info("šŸ“ Sending first message...") conversation.send_message( - "Could you go to https://all-hands.dev/ blog page and summarize main " + "Could you go to https://openhands.dev/ blog page and summarize main " "points of the latest blog?" ) conversation.run() diff --git a/sdk/guides/agent-server/local-server.mdx b/sdk/guides/agent-server/local-server.mdx index 0c0b3b1c..3aa92104 100644 --- a/sdk/guides/agent-server/local-server.mdx +++ b/sdk/guides/agent-server/local-server.mdx @@ -139,13 +139,13 @@ assert api_key is not None, "LLM_API_KEY environment variable is not set." 
llm = LLM( usage_id="agent", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) title_gen_llm = LLM( usage_id="title-gen-llm", model="litellm_proxy/openai/gpt-5-mini", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) diff --git a/sdk/guides/custom-tools.mdx b/sdk/guides/custom-tools.mdx index 8426c10b..12a33b46 100644 --- a/sdk/guides/custom-tools.mdx +++ b/sdk/guides/custom-tools.mdx @@ -115,7 +115,7 @@ class GrepExecutor(ToolExecutor[GrepAction, GrepObservation]): def __init__(self, bash: BashExecutor): self.bash: BashExecutor = bash - def __call__(self, action: GrepAction) -> GrepObservation: + def __call__(self, action: GrepAction, conversation=None) -> GrepObservation: # noqa: ARG002 root = os.path.abspath(action.path) pat = shlex.quote(action.pattern) root_q = shlex.quote(root) diff --git a/sdk/guides/llm-routing.mdx b/sdk/guides/llm-routing.mdx index b76c392f..0766af67 100644 --- a/sdk/guides/llm-routing.mdx +++ b/sdk/guides/llm-routing.mdx @@ -48,7 +48,7 @@ primary_llm = LLM( secondary_llm = LLM( usage_id="agent-secondary", model="litellm_proxy/mistral/devstral-small-2507", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=base_url, api_key=SecretStr(api_key), ) multimodal_router = MultimodalRouter( diff --git a/sdk/guides/metrics.mdx b/sdk/guides/metrics.mdx index e8b73516..a960431a 100644 --- a/sdk/guides/metrics.mdx +++ b/sdk/guides/metrics.mdx @@ -332,7 +332,7 @@ conversation.run() second_llm = LLM( usage_id="demo-secondary", model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929", - base_url="https://llm-proxy.eval.all-hands.dev", + base_url=os.getenv("LLM_BASE_URL"), api_key=SecretStr(api_key), ) conversation.llm_registry.add(second_llm)
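
For reference, the configuration pattern these synced example hunks converge on reads credentials and the base URL from the environment instead of hard-coding the LiteLLM proxy URL. Below is a minimal sketch of that pattern, assuming the `LLM` class exported by `openhands.sdk` and the `LLM_API_KEY` / `LLM_BASE_URL` variables used throughout the diffs (the import paths are inferred from the SDK examples and are not shown in full above):

```python
import os

from pydantic import SecretStr

from openhands.sdk import LLM  # assumed import path, matching the agent-sdk examples

# Read credentials from the environment rather than hard-coding a proxy URL,
# as done in the docker-sandbox, local-server, llm-routing, and metrics guides above.
api_key = os.getenv("LLM_API_KEY")
assert api_key is not None, "LLM_API_KEY environment variable is not set."

llm = LLM(
    usage_id="agent",
    model="litellm_proxy/anthropic/claude-sonnet-4-5-20250929",
    base_url=os.getenv("LLM_BASE_URL"),  # None when unset, letting the provider default apply
    api_key=SecretStr(api_key),
)
```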