Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 8 additions & 6 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
name: CI
on:
push:
branches-ignore:
- 'generated'
- 'codegen/**'
- 'integrated/**'
- 'stl-preview-head/**'
- 'stl-preview-base/**'
branches:
- '**'
- '!integrated/**'
- '!stl-preview-head/**'
- '!stl-preview-base/**'
- '!generated'
- '!codegen/**'
- 'codegen/stl/**'
pull_request:
branches-ignore:
- 'stl-preview-head/**'
Expand Down
2 changes: 1 addition & 1 deletion .release-please-manifest.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
".": "3.0.0"
".": "3.1.0"
}
4 changes: 2 additions & 2 deletions .stats.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
configured_endpoints: 84
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/droidrun%2Fdroidrun-cloud-7187108bdaaacaa810e3fa1ecd7da0b7242ac5361ad299a94c34cdf258c2735c.yml
openapi_spec_hash: 33fedb3f0532192e0e91196dd7c3da12
openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/droidrun%2Fdroidrun-cloud-95a4ffa336c4635145ec6a8a114a1f38997f2cbc1bca80777fbcd91929d6d7f5.yml
openapi_spec_hash: 4fc5a0b864310d5f1e1d2a3093ecbbf9
config_hash: 6addf54d60509e5161fde56d48805e01
20 changes: 20 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,25 @@
# Changelog

## 3.1.0 (2026-03-18)

Full Changelog: [v3.0.0...v3.1.0](https://github.com/droidrun/mobilerun-sdk-python/compare/v3.0.0...v3.1.0)

### Features

* **api:** api update ([6eb34d8](https://github.com/droidrun/mobilerun-sdk-python/commit/6eb34d80a85d1bb428a2e143d879cfb40c9dcc67))
* **api:** api update ([8950157](https://github.com/droidrun/mobilerun-sdk-python/commit/8950157a39d357a5f4ba52454f558c67fb7f5a6f))


### Bug Fixes

* **deps:** bump minimum typing-extensions version ([e5ee758](https://github.com/droidrun/mobilerun-sdk-python/commit/e5ee7584ab8e87a070dc8a33d27b41278a1d046a))
* **pydantic:** do not pass `by_alias` unless set ([8ef6593](https://github.com/droidrun/mobilerun-sdk-python/commit/8ef6593e4873afe4ac6d3504aa9f53b9f6cf3b2b))


### Chores

* **internal:** tweak CI branches ([094cdf3](https://github.com/droidrun/mobilerun-sdk-python/commit/094cdf30b65e93bc28bb7b3167d391b2b3524193))

## 3.0.0 (2026-03-15)

Full Changelog: [v2.1.0...v3.0.0](https://github.com/droidrun/mobilerun-sdk-python/compare/v2.1.0...v3.0.0)
Expand Down
4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "mobilerun-sdk"
version = "3.0.0"
version = "3.1.0"
description = "The official Python library for the mobilerun API"
dynamic = ["readme"]
license = "Apache-2.0"
Expand All @@ -11,7 +11,7 @@ authors = [
dependencies = [
"httpx>=0.23.0, <1",
"pydantic>=1.9.0, <3",
"typing-extensions>=4.10, <5",
"typing-extensions>=4.14, <5",
"anyio>=3.5.0, <5",
"distro>=1.7.0, <2",
"sniffio",
Expand Down
11 changes: 9 additions & 2 deletions src/mobilerun/_compat.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload
from datetime import date, datetime
from typing_extensions import Self, Literal
from typing_extensions import Self, Literal, TypedDict

import pydantic
from pydantic.fields import FieldInfo
Expand Down Expand Up @@ -131,6 +131,10 @@ def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str:
return model.model_dump_json(indent=indent)


class _ModelDumpKwargs(TypedDict, total=False):
    """Optional keyword arguments forwarded to ``pydantic.BaseModel.model_dump``.

    ``total=False`` means ``by_alias`` may be absent entirely: the caller below
    only inserts the key when an explicit ``by_alias`` value was supplied, so
    ``model_dump`` is never passed ``by_alias=None``.
    """

    by_alias: bool


def model_dump(
model: pydantic.BaseModel,
*,
Expand All @@ -142,14 +146,17 @@ def model_dump(
by_alias: bool | None = None,
) -> dict[str, Any]:
if (not PYDANTIC_V1) or hasattr(model, "model_dump"):
kwargs: _ModelDumpKwargs = {}
if by_alias is not None:
kwargs["by_alias"] = by_alias
return model.model_dump(
mode=mode,
exclude=exclude,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
# warnings are not supported in Pydantic v1
warnings=True if PYDANTIC_V1 else warnings,
by_alias=by_alias,
**kwargs,
)
return cast(
"dict[str, Any]",
Expand Down
2 changes: 1 addition & 1 deletion src/mobilerun/_version.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

__title__ = "mobilerun"
__version__ = "3.0.0" # x-release-please-version
__version__ = "3.1.0" # x-release-please-version
16 changes: 16 additions & 0 deletions src/mobilerun/resources/tasks/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -281,6 +281,7 @@ def run(
output_schema: Optional[Dict[str, object]] | Omit = omit,
reasoning: bool | Omit = omit,
stealth: bool | Omit = omit,
subagent_model: str | Omit = omit,
temperature: float | Omit = omit,
vision: bool | Omit = omit,
vpn_country: Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]] | Omit = omit,
Expand All @@ -304,6 +305,8 @@ def run(
llm_model: The LLM model identifier to use for the task (e.g.
'google/gemini-3.1-flash-lite-preview')

subagent_model: LLM model used by sub-agent roles: executor, app_opener, structured_output

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
Expand All @@ -329,6 +332,7 @@ def run(
"output_schema": output_schema,
"reasoning": reasoning,
"stealth": stealth,
"subagent_model": subagent_model,
"temperature": temperature,
"vision": vision,
"vpn_country": vpn_country,
Expand Down Expand Up @@ -357,6 +361,7 @@ def run_streamed(
output_schema: Optional[Dict[str, object]] | Omit = omit,
reasoning: bool | Omit = omit,
stealth: bool | Omit = omit,
subagent_model: str | Omit = omit,
temperature: float | Omit = omit,
vision: bool | Omit = omit,
vpn_country: Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]] | Omit = omit,
Expand All @@ -379,6 +384,8 @@ def run_streamed(
llm_model: The LLM model identifier to use for the task (e.g.
'google/gemini-3.1-flash-lite-preview')

subagent_model: LLM model used by sub-agent roles: executor, app_opener, structured_output

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
Expand All @@ -404,6 +411,7 @@ def run_streamed(
"output_schema": output_schema,
"reasoning": reasoning,
"stealth": stealth,
"subagent_model": subagent_model,
"temperature": temperature,
"vision": vision,
"vpn_country": vpn_country,
Expand Down Expand Up @@ -724,6 +732,7 @@ async def run(
output_schema: Optional[Dict[str, object]] | Omit = omit,
reasoning: bool | Omit = omit,
stealth: bool | Omit = omit,
subagent_model: str | Omit = omit,
temperature: float | Omit = omit,
vision: bool | Omit = omit,
vpn_country: Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]] | Omit = omit,
Expand All @@ -747,6 +756,8 @@ async def run(
llm_model: The LLM model identifier to use for the task (e.g.
'google/gemini-3.1-flash-lite-preview')

subagent_model: LLM model used by sub-agent roles: executor, app_opener, structured_output

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
Expand All @@ -772,6 +783,7 @@ async def run(
"output_schema": output_schema,
"reasoning": reasoning,
"stealth": stealth,
"subagent_model": subagent_model,
"temperature": temperature,
"vision": vision,
"vpn_country": vpn_country,
Expand Down Expand Up @@ -800,6 +812,7 @@ async def run_streamed(
output_schema: Optional[Dict[str, object]] | Omit = omit,
reasoning: bool | Omit = omit,
stealth: bool | Omit = omit,
subagent_model: str | Omit = omit,
temperature: float | Omit = omit,
vision: bool | Omit = omit,
vpn_country: Optional[Literal["US", "BR", "FR", "DE", "IN", "JP", "KR", "ZA"]] | Omit = omit,
Expand All @@ -822,6 +835,8 @@ async def run_streamed(
llm_model: The LLM model identifier to use for the task (e.g.
'google/gemini-3.1-flash-lite-preview')

subagent_model: LLM model used by sub-agent roles: executor, app_opener, structured_output

extra_headers: Send extra headers

extra_query: Add additional query parameters to the request
Expand All @@ -847,6 +862,7 @@ async def run_streamed(
"output_schema": output_schema,
"reasoning": reasoning,
"stealth": stealth,
"subagent_model": subagent_model,
"temperature": temperature,
"vision": vision,
"vpn_country": vpn_country,
Expand Down
2 changes: 2 additions & 0 deletions src/mobilerun/types/agent_list_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ class AgentListResponseItem(BaseModel):

reasoning: bool

subagent_model: Optional[str] = FieldInfo(alias="subagentModel", default=None)

vision: bool


Expand Down
2 changes: 1 addition & 1 deletion src/mobilerun/types/devices/time_timezone_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@


class TimeTimezoneResponse(BaseModel):
timezone: str
timezone: Optional[str] = None

schema_: Optional[str] = FieldInfo(alias="$schema", default=None)
"""A URL to the JSON Schema for this object."""
3 changes: 3 additions & 0 deletions src/mobilerun/types/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,9 @@ class Task(BaseModel):

steps: Optional[int] = None

subagent_model: Optional[str] = FieldInfo(alias="subagentModel", default=None)
"""LLM model used by sub-agent roles: executor, app_opener, structured_output"""

succeeded: Optional[bool] = None

temperature: Optional[float] = None
Expand Down
3 changes: 3 additions & 0 deletions src/mobilerun/types/task_run_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,9 @@ class TaskRunParams(TypedDict, total=False):

stealth: bool

subagent_model: Annotated[str, PropertyInfo(alias="subagentModel")]
"""LLM model used by sub-agent roles: executor, app_opener, structured_output"""

temperature: float

vision: bool
Expand Down
3 changes: 3 additions & 0 deletions src/mobilerun/types/task_run_streamed_params.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,9 @@ class TaskRunStreamedParams(TypedDict, total=False):

stealth: bool

subagent_model: Annotated[str, PropertyInfo(alias="subagentModel")]
"""LLM model used by sub-agent roles: executor, app_opener, structured_output"""

temperature: float

vision: bool
Expand Down
4 changes: 4 additions & 0 deletions tests/api_resources/test_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -265,6 +265,7 @@ def test_method_run_with_all_params(self, client: Mobilerun) -> None:
output_schema={"foo": "bar"},
reasoning=True,
stealth=True,
subagent_model="subagentModel",
temperature=0,
vision=True,
vpn_country="US",
Expand Down Expand Up @@ -330,6 +331,7 @@ def test_method_run_streamed_with_all_params(self, client: Mobilerun) -> None:
output_schema={"foo": "bar"},
reasoning=True,
stealth=True,
subagent_model="subagentModel",
temperature=0,
vision=True,
vpn_country="US",
Expand Down Expand Up @@ -698,6 +700,7 @@ async def test_method_run_with_all_params(self, async_client: AsyncMobilerun) ->
output_schema={"foo": "bar"},
reasoning=True,
stealth=True,
subagent_model="subagentModel",
temperature=0,
vision=True,
vpn_country="US",
Expand Down Expand Up @@ -763,6 +766,7 @@ async def test_method_run_streamed_with_all_params(self, async_client: AsyncMobi
output_schema={"foo": "bar"},
reasoning=True,
stealth=True,
subagent_model="subagentModel",
temperature=0,
vision=True,
vpn_country="US",
Expand Down