diff --git a/pyproject.toml b/pyproject.toml
index b4c44ecb..124350b0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,11 +1,11 @@
 [project]
 name = "uipath-langchain"
-version = "0.0.144"
+version = "0.0.145"
 description = "UiPath Langchain"
 readme = { file = "README.md", content-type = "text/markdown" }
 requires-python = ">=3.10"
 dependencies = [
-    "uipath>=2.1.103, <2.2.0",
+    "uipath>=2.1.110, <2.2.0",
     "langgraph>=0.5.0, <0.7.0",
     "langchain-core>=0.3.34",
     "langgraph-checkpoint-sqlite>=2.0.3",
@@ -111,4 +111,4 @@ asyncio_mode = "auto"
 name = "testpypi"
 url = "https://test.pypi.org/simple/"
 publish-url = "https://test.pypi.org/legacy/"
-explicit = true
\ No newline at end of file
+explicit = true
diff --git a/src/uipath_langchain/_cli/_runtime/_runtime.py b/src/uipath_langchain/_cli/_runtime/_runtime.py
index 7169f6b3..662a9b2f 100644
--- a/src/uipath_langchain/_cli/_runtime/_runtime.py
+++ b/src/uipath_langchain/_cli/_runtime/_runtime.py
@@ -2,12 +2,14 @@
 import os
 from contextlib import asynccontextmanager
 from typing import Any, AsyncGenerator, AsyncIterator, Optional, Sequence
+from uuid import uuid4
 
 from langchain_core.runnables.config import RunnableConfig
 from langgraph.checkpoint.sqlite.aio import AsyncSqliteSaver
 from langgraph.errors import EmptyInputError, GraphRecursionError, InvalidUpdateError
 from langgraph.graph.state import CompiledStateGraph, StateGraph
 from langgraph.types import Interrupt, StateSnapshot
+from typing_extensions import override
 from uipath._cli._runtime._contracts import (
     UiPathBaseRuntime,
     UiPathBreakpointResult,
@@ -17,12 +19,14 @@
     UiPathRuntimeResult,
     UiPathRuntimeStatus,
 )
+from uipath._cli.models.runtime_schema import Entrypoint
 from uipath._events._events import (
     UiPathAgentMessageEvent,
     UiPathAgentStateEvent,
     UiPathRuntimeEvent,
 )
 
+from .._utils._schema import generate_schema_from_graph
 from ._context import LangGraphRuntimeContext
 from ._exception import LangGraphErrorCode, LangGraphRuntimeError
 from ._graph_resolver import AsyncResolver, LangGraphJsonResolver
@@ -481,6 +485,21 @@ def __init__(
         self.resolver = LangGraphJsonResolver(entrypoint=entrypoint)
         super().__init__(context, self.resolver)
 
+    @override
+    async def get_entrypoint(self) -> Entrypoint:
+        """Get entrypoint for this LangGraph runtime."""
+        graph = await self.resolver()
+        compiled_graph = graph.compile()
+        schema = generate_schema_from_graph(compiled_graph)
+
+        return Entrypoint(
+            file_path=self.context.entrypoint,  # type: ignore[call-arg]
+            unique_id=str(uuid4()),
+            type="agent",
+            input=schema["input"],
+            output=schema["output"],
+        )
+
     async def cleanup(self) -> None:
         """Cleanup runtime resources including resolver."""
         await super().cleanup()
diff --git a/src/uipath_langchain/_cli/_utils/_schema.py b/src/uipath_langchain/_cli/_utils/_schema.py
new file mode 100644
index 00000000..d7bfe991
--- /dev/null
+++ b/src/uipath_langchain/_cli/_utils/_schema.py
@@ -0,0 +1,85 @@
+from typing import Any, Dict
+
+from langgraph.graph.state import CompiledStateGraph
+
+
+def resolve_refs(schema, root=None):
+    """Recursively resolves $ref references in a JSON schema."""
+    if root is None:
+        root = schema  # Store the root schema to resolve $refs
+
+    if isinstance(schema, dict):
+        if "$ref" in schema:
+            ref_path = schema["$ref"].lstrip("#/").split("/")
+            ref_schema = root
+            for part in ref_path:
+                ref_schema = ref_schema.get(part, {})
+            return resolve_refs(ref_schema, root)
+
+        return {k: resolve_refs(v, root) for k, v in schema.items()}
+
+    elif isinstance(schema, list):
+        return [resolve_refs(item, root) for item in schema]
+
+    return schema
+
+
+def process_nullable_types(
+    schema: Dict[str, Any] | list[Any] | Any,
+) -> Dict[str, Any] | list[Any]:
+    """Process the schema to handle nullable types by removing anyOf with null and keeping the base type."""
+    if isinstance(schema, dict):
+        if "anyOf" in schema and len(schema["anyOf"]) == 2:
+            types = [t.get("type") for t in schema["anyOf"]]
+            if "null" in types:
+                non_null_type = next(
+                    t for t in schema["anyOf"] if t.get("type") != "null"
+                )
+                return non_null_type
+
+        return {k: process_nullable_types(v) for k, v in schema.items()}
+    elif isinstance(schema, list):
+        return [process_nullable_types(item) for item in schema]
+    return schema
+
+
+def generate_schema_from_graph(
+    graph: CompiledStateGraph[Any, Any, Any],
+) -> Dict[str, Any]:
+    """Extract input/output schema from a LangGraph graph"""
+    schema = {
+        "input": {"type": "object", "properties": {}, "required": []},
+        "output": {"type": "object", "properties": {}, "required": []},
+    }
+
+    if hasattr(graph, "input_schema"):
+        if hasattr(graph.input_schema, "model_json_schema"):
+            input_schema = graph.input_schema.model_json_schema()
+            unpacked_ref_def_properties = resolve_refs(input_schema)
+
+            # Process the schema to handle nullable types
+            processed_properties = process_nullable_types(
+                unpacked_ref_def_properties.get("properties", {})
+            )
+
+            schema["input"]["properties"] = processed_properties
+            schema["input"]["required"] = unpacked_ref_def_properties.get(
+                "required", []
+            )
+
+    if hasattr(graph, "output_schema"):
+        if hasattr(graph.output_schema, "model_json_schema"):
+            output_schema = graph.output_schema.model_json_schema()
+            unpacked_ref_def_properties = resolve_refs(output_schema)
+
+            # Process the schema to handle nullable types
+            processed_properties = process_nullable_types(
+                unpacked_ref_def_properties.get("properties", {})
+            )
+
+            schema["output"]["properties"] = processed_properties
+            schema["output"]["required"] = unpacked_ref_def_properties.get(
+                "required", []
+            )
+
+    return schema
diff --git a/src/uipath_langchain/_cli/cli_eval.py b/src/uipath_langchain/_cli/cli_eval.py
index 31d4235e..5b5d5f48 100644
--- a/src/uipath_langchain/_cli/cli_eval.py
+++ b/src/uipath_langchain/_cli/cli_eval.py
@@ -6,8 +6,9 @@
     get_current_span,
 )
 from uipath._cli._evals._console_progress_reporter import ConsoleProgressReporter
+from uipath._cli._evals._evaluate import evaluate
 from uipath._cli._evals._progress_reporter import StudioWebProgressReporter
-from uipath._cli._evals._runtime import UiPathEvalContext, UiPathEvalRuntime
+from uipath._cli._evals._runtime import UiPathEvalContext
 from uipath._cli._runtime._contracts import (
     UiPathRuntimeFactory,
 )
@@ -82,14 +83,7 @@ def generate_runtime(ctx: LangGraphRuntimeContext) -> LangGraphScriptRuntime:
 
         runtime_factory.add_instrumentor(LangChainInstrumentor, get_current_span)
 
-        async def execute():
-            async with UiPathEvalRuntime.from_eval_context(
-                factory=runtime_factory, context=eval_context, event_bus=event_bus
-            ) as eval_runtime:
-                await eval_runtime.execute()
-                await event_bus.wait_for_all()
-
-        asyncio.run(execute())
+        asyncio.run(evaluate(runtime_factory, eval_context, event_bus))
 
         return MiddlewareResult(should_continue=False)
     except Exception as e:
diff --git a/src/uipath_langchain/_cli/cli_init.py b/src/uipath_langchain/_cli/cli_init.py
index 427fa44c..31c0d790 100644
--- a/src/uipath_langchain/_cli/cli_init.py
+++ b/src/uipath_langchain/_cli/cli_init.py
@@ -6,7 +6,7 @@
 import uuid
 from collections.abc import Generator
 from enum import Enum
-from typing import Any, Callable, Dict, overload
+from typing import Any, Callable, overload
 
 import click
 from langgraph.graph.state import CompiledStateGraph
@@ -14,6 +14,8 @@
 from uipath._cli._utils._parse_ast import generate_bindings_json  # type: ignore
 from uipath._cli.middlewares import MiddlewareResult
 
+from uipath_langchain._cli._utils._schema import generate_schema_from_graph
+
 from ._utils._graph import LangGraphConfig
 
 console = ConsoleLogger()
@@ -27,88 +29,6 @@ class FileOperationStatus(str, Enum):
     SKIPPED = "skipped"
 
 
-def resolve_refs(schema, root=None):
-    """Recursively resolves $ref references in a JSON schema."""
-    if root is None:
-        root = schema  # Store the root schema to resolve $refs
-
-    if isinstance(schema, dict):
-        if "$ref" in schema:
-            ref_path = schema["$ref"].lstrip("#/").split("/")
-            ref_schema = root
-            for part in ref_path:
-                ref_schema = ref_schema.get(part, {})
-            return resolve_refs(ref_schema, root)
-
-        return {k: resolve_refs(v, root) for k, v in schema.items()}
-
-    elif isinstance(schema, list):
-        return [resolve_refs(item, root) for item in schema]
-
-    return schema
-
-
-def process_nullable_types(
-    schema: Dict[str, Any] | list[Any] | Any,
-) -> Dict[str, Any] | list[Any]:
-    """Process the schema to handle nullable types by removing anyOf with null and keeping the base type."""
-    if isinstance(schema, dict):
-        if "anyOf" in schema and len(schema["anyOf"]) == 2:
-            types = [t.get("type") for t in schema["anyOf"]]
-            if "null" in types:
-                non_null_type = next(
-                    t for t in schema["anyOf"] if t.get("type") != "null"
-                )
-                return non_null_type
-
-        return {k: process_nullable_types(v) for k, v in schema.items()}
-    elif isinstance(schema, list):
-        return [process_nullable_types(item) for item in schema]
-    return schema
-
-
-def generate_schema_from_graph(
-    graph: CompiledStateGraph[Any, Any, Any],
-) -> Dict[str, Any]:
-    """Extract input/output schema from a LangGraph graph"""
-    schema = {
-        "input": {"type": "object", "properties": {}, "required": []},
-        "output": {"type": "object", "properties": {}, "required": []},
-    }
-
-    if hasattr(graph, "input_schema"):
-        if hasattr(graph.input_schema, "model_json_schema"):
-            input_schema = graph.input_schema.model_json_schema()
-            unpacked_ref_def_properties = resolve_refs(input_schema)
-
-            # Process the schema to handle nullable types
-            processed_properties = process_nullable_types(
-                unpacked_ref_def_properties.get("properties", {})
-            )
-
-            schema["input"]["properties"] = processed_properties
-            schema["input"]["required"] = unpacked_ref_def_properties.get(
-                "required", []
-            )
-
-    if hasattr(graph, "output_schema"):
-        if hasattr(graph.output_schema, "model_json_schema"):
-            output_schema = graph.output_schema.model_json_schema()
-            unpacked_ref_def_properties = resolve_refs(output_schema)
-
-            # Process the schema to handle nullable types
-            processed_properties = process_nullable_types(
-                unpacked_ref_def_properties.get("properties", {})
-            )
-
-            schema["output"]["properties"] = processed_properties
-            schema["output"]["required"] = unpacked_ref_def_properties.get(
-                "required", []
-            )
-
-    return schema
-
-
 def generate_agent_md_file(
     target_directory: str,
     file_name: str,
diff --git a/src/uipath_langchain/runtime_factories.py b/src/uipath_langchain/runtime_factories.py
new file mode 100644
index 00000000..22443459
--- /dev/null
+++ b/src/uipath_langchain/runtime_factories.py
@@ -0,0 +1,21 @@
+"""Runtime factory for LangGraph projects."""
+
+from uipath._cli._runtime._contracts import UiPathRuntimeFactory
+
+from ._cli._runtime._context import LangGraphRuntimeContext
+from ._cli._runtime._runtime import LangGraphScriptRuntime
+
+
+class LangGraphRuntimeFactory(
+    UiPathRuntimeFactory[LangGraphScriptRuntime, LangGraphRuntimeContext]
+):
+    """Factory for LangGraph runtimes."""
+
+    def __init__(self):
+        super().__init__(
+            LangGraphScriptRuntime,
+            LangGraphRuntimeContext,
+            context_generator=lambda **kwargs: LangGraphRuntimeContext.with_defaults(
+                **kwargs
+            ),
+        )
diff --git a/uv.lock b/uv.lock
index 49662caf..772c98a8 100644
--- a/uv.lock
+++ b/uv.lock
@@ -3308,7 +3308,7 @@ wheels = [
 
 [[package]]
 name = "uipath"
-version = "2.1.108"
+version = "2.1.110"
 source = { registry = "https://pypi.org/simple" }
 dependencies = [
     { name = "azure-monitor-opentelemetry" },
@@ -3330,9 +3330,9 @@ dependencies = [
     { name = "tomli" },
     { name = "truststore" },
 ]
-sdist = { url = "https://files.pythonhosted.org/packages/d7/7a/b453ccd146c2aaaad87ff4cd71e59fdeec08ad380ade08cc4b980fa46602/uipath-2.1.108.tar.gz", hash = "sha256:f0a89c6ad4394dacae3d448488b6f6e03129afa3097cb2f2f8193599987afc3f", size = 2241996, upload-time = "2025-10-24T16:07:35.416Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/21/b6/56322c6f762d35116bf5e9c2ab5b3b7d335291781f519bbb635e9739b7e8/uipath-2.1.110.tar.gz", hash = "sha256:758032323a79ce32ba14c7d7ad6fcd031e92057a24865e2154183c6c5147dd2f", size = 2298308, upload-time = "2025-10-24T18:47:47.996Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/dc/84/49efdb83e21795d9eabea2dcce80f33868597cc31f317ec13f389e410dd0/uipath-2.1.108-py3-none-any.whl", hash = "sha256:031ba5bfe1f9785e8b3a1ea792acad9ac92585b7e51051c11c5e7abe4a03ba25", size = 298633, upload-time = "2025-10-24T16:07:33.429Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/44/e336ea0e8e977808f13acd3701fc0d8311410559815f9dda12fa773a41f3/uipath-2.1.110-py3-none-any.whl", hash = "sha256:130242c7eb99c0b20aba905bdddfa1f94f1fe8c5e5406201fa6b78ed37e30100", size = 352803, upload-time = "2025-10-24T18:47:46.03Z" },
 ]
 
 [[package]]
@@ -3383,7 +3383,7 @@ requires-dist = [
     { name = "openinference-instrumentation-langchain", specifier = ">=0.1.50" },
     { name = "pydantic-settings", specifier = ">=2.6.0" },
     { name = "python-dotenv", specifier = ">=1.0.1" },
-    { name = "uipath", specifier = ">=2.1.107,<2.2.0" },
+    { name = "uipath", specifier = ">=2.1.110,<2.2.0" },
     { name = "uipath-langchain", marker = "extra == 'langchain'", specifier = ">=0.0.2" },
 ]
 provides-extras = ["langchain"]
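
Reviewer note (not part of the patch): a minimal, hypothetical sketch of exercising the relocated generate_schema_from_graph helper in isolation. The State model, answer_node, and graph wiring below are illustrative only, and the sketch assumes the compiled graph exposes the input_schema/output_schema attributes that the helper's hasattr guards check for.

# Illustrative usage sketch; State and answer_node are made-up examples.
from typing import Optional

from langgraph.graph import END, START, StateGraph
from pydantic import BaseModel

from uipath_langchain._cli._utils._schema import generate_schema_from_graph


class State(BaseModel):
    question: str
    answer: Optional[str] = None


def answer_node(state: State) -> dict:
    # The node body is irrelevant to schema extraction; it just echoes the input.
    return {"answer": f"echo: {state.question}"}


builder = StateGraph(State)
builder.add_node("answer", answer_node)
builder.add_edge(START, "answer")
builder.add_edge("answer", END)
compiled = builder.compile()

schema = generate_schema_from_graph(compiled)
# If the schemas are exposed, Optional[str] fields should be collapsed to their
# base type by process_nullable_types rather than emitted as anyOf with null.
print(schema["input"]["required"])
print(schema["output"]["properties"])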