From 509e7a659cb8f00c62670906db5573c49c24fd09 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Thu, 27 Jan 2022 15:35:09 -0600 Subject: [PATCH 01/15] Rework grpc locations and add some different structure --- .gitignore | 6 ++- LICENSE | 21 ++++++++ README.md | 8 ++- poetry.lock | 70 +++++++++++++++------------ pyproject.toml | 26 +++++++--- scripts/gen-protos.py | 64 ++++++++++++++++-------- temporalio/{proto => api}/__init__.py | 0 temporalio/bridge/__init__.py | 0 temporalio/bridge/proto/__init__.py | 0 tests/__init__.py | 0 tests/api/__init__.py | 0 tests/api/test_grpc_stub.py | 54 +++++++++++++++++++++ 12 files changed, 190 insertions(+), 59 deletions(-) create mode 100644 LICENSE rename temporalio/{proto => api}/__init__.py (100%) create mode 100644 temporalio/bridge/__init__.py create mode 100644 temporalio/bridge/proto/__init__.py create mode 100644 tests/__init__.py create mode 100644 tests/api/__init__.py create mode 100644 tests/api/test_grpc_stub.py diff --git a/.gitignore b/.gitignore index 2818b2936..a744c40b7 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,5 @@ -temporalio/proto/* +.venv +temporalio/api/* +!temporalio/api/__init__.py +temporalio/bridge/proto/* +!temporalio/bridge/proto/__init__.py diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..c7f1b9e7e --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License + +Copyright (c) 2022 Temporal Technologies Inc. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md index 3fe711ba3..e1bc5dd91 100644 --- a/README.md +++ b/README.md @@ -9,13 +9,19 @@ - [`poetry`](https://github.com/python-poetry/poetry) `pipx install poetry` - [`poe`](https://github.com/nat-n/poethepoet) `pipx install poethepoet` +- Use a local virtual env environment (helps IDEs and Windows): + + ```bash + poetry config virtualenvs.in-project true + ``` + - Install the package dependencies ```bash poetry install ``` -- Build the project (only generate the protos for now) +- Build the project ```bash poe build diff --git a/poetry.lock b/poetry.lock index 4d9c8bef9..d68aff957 100644 --- a/poetry.lock +++ b/poetry.lock @@ -118,6 +118,20 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "isort" +version = "5.10.1" +description = "A Python utility / library to sort Python imports." 
+category = "dev" +optional = false +python-versions = ">=3.6.1,<4.0" + +[package.extras] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +requirements_deprecated_finder = ["pipreqs", "pip-api"] +colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] + [[package]] name = "mypy" version = "0.931" @@ -155,14 +169,6 @@ python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" -[[package]] -name = "pastel" -version = "0.2.1" -description = "Bring colors to your terminal." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "pathspec" version = "0.9.0" @@ -198,21 +204,6 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "poethepoet" -version = "0.12.2" -description = "A task runner that works well with poetry." -category = "dev" -optional = false -python-versions = ">=3.6.2" - -[package.dependencies] -pastel = ">=0.2.1,<0.3.0" -tomli = ">=1.2.2,<2.0.0" - -[package.extras] -poetry_plugin = ["poetry (>=1.0,<2.0)"] - [[package]] name = "protobuf" version = "3.19.3" @@ -262,6 +253,21 @@ toml = "*" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.17.2" +description = "Pytest support for asyncio" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=6.1.0" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.8\""} + +[package.extras] +testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)"] + [[package]] name = "six" version = "1.16.0" @@ -336,7 +342,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = 
"059ab483abe5cb1cffb9a08f33c3f9325ceda3aaf49fd6f74202767fe268f31e" +content-hash = "e665197c5402c0a436481eaa9bd351b44a60652c2fc15003fd4c0d380c0c47e8" [metadata.files] atomicwrites = [ @@ -459,6 +465,10 @@ iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] +isort = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] mypy = [ {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, @@ -489,10 +499,6 @@ packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] -pastel = [ - {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, - {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, -] pathspec = [ {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, @@ -505,10 +511,6 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = 
"pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] -poethepoet = [ - {file = "poethepoet-0.12.2-py3-none-any.whl", hash = "sha256:dd91d7113d3d33a40cfe6ca01c0a1982e33dae5a4f1f0c638f57199a00d85405"}, - {file = "poethepoet-0.12.2.tar.gz", hash = "sha256:76eee7111c1d63e765c441aee56fbc0abe151233df99e28b2da5b3dd1e7c9ea4"}, -] protobuf = [ {file = "protobuf-3.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1cb2ed66aac593adbf6dca4f07cd7ee7e2958b17bbc85b2cc8bc564ebeb258ec"}, {file = "protobuf-3.19.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:898bda9cd37ec0c781b598891e86435de80c3bfa53eb483a9dac5a11ec93e942"}, @@ -549,6 +551,10 @@ pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] +pytest-asyncio = [ + {file = "pytest-asyncio-0.17.2.tar.gz", hash = "sha256:6d895b02432c028e6957d25fc936494e78c6305736e785d9fee408b1efbc7ff4"}, + {file = "pytest_asyncio-0.17.2-py3-none-any.whl", hash = "sha256:e0fe5dbea40516b661ef1bcfe0bd9461c2847c4ef4bb40012324f2454fb7d56d"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, diff --git a/pyproject.toml b/pyproject.toml index 214324c23..29131f916 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,22 +12,36 @@ repository = "https://github.com/temporalio/sdk-python" documentation = "https://docs.temporal.io/docs/python" [tool.poetry.dependencies] -python = "^3.7" grpcio = "^1.43.0" -types-protobuf = "^3.19.6" mypy = "^0.931" +python = "^3.7" +types-protobuf = "^3.19.6" [tool.poetry.dev-dependencies] -mypy = "^0.931" black = "^21.12b0" grpcio-tools = "^1.43.0" +isort = 
"^5.10.1" +mypy = "^0.931" pytest = "^6.2.5" +pytest-asyncio = "^0.17.2" [tool.poe.tasks] -format = "black ." +build = ["gen-protos", "test"] +format = [{cmd = "black ."}, {cmd = "isort ."}] +gen-protos = "python scripts/gen-protos.py" lint = "black --check ." -build = "python scripts/gen-protos.py" + +[tool.poe.tasks.test] +cmd = "pytest" +env = { PYTHONDONTWRITEBYTECODE = "1" } + +[tool.pytest.ini_options] +asyncio_mode = "auto" + +[tool.isort] +profile = "black" +skip_gitignore = true [build-system] -requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" +requires = ["poetry-core>=1.0.0"] diff --git a/scripts/gen-protos.py b/scripts/gen-protos.py index 88a276cf8..fbc4d0565 100644 --- a/scripts/gen-protos.py +++ b/scripts/gen-protos.py @@ -1,19 +1,30 @@ #!/usr/bin/env python3 -import sys -import subprocess +import os import re +import shutil +import subprocess +import sys +import tempfile from functools import partial from pathlib import Path base_dir = Path(__file__).parent.parent -codegen_dir = base_dir / "temporalio" / "proto" proto_dir = base_dir / "temporalio" / "bridge" / "sdk-core" / "protos" api_proto_dir = proto_dir / "api_upstream" core_proto_dir = proto_dir / "local" proto_paths = proto_dir.glob("**/*.proto") -fix_import = partial( - re.compile(r"^from (temporal|dependencies)\.").sub, r"from temporalio.proto.\1." +api_out_dir = base_dir / "temporalio" / "api" +sdk_out_dir = base_dir / "temporalio" / "bridge" / "proto" + +fix_api_import = partial( + re.compile(r"from temporal\.api\.").sub, r"from temporalio.api." +) +fix_dependency_import = partial( + re.compile(r"from dependencies\.").sub, r"from temporalio.api.dependencies." +) +fix_sdk_import = partial( + re.compile(r"from temporal\.sdk\.core\.").sub, r"from temporalio.bridge.proto." 
) @@ -31,23 +42,38 @@ def fix_generated_output(base_path: Path): fix_generated_output(p) else: with p.open(encoding="utf8") as f: - fixed_content = "".join(fix_import(l) for l in f) + content = f.read() + content = fix_api_import(content) + content = fix_dependency_import(content) + content = fix_sdk_import(content) with p.open("w") as f: - f.write(fixed_content) + f.write(content) if __name__ == "__main__": print("Generating protos...", file=sys.stderr) - subprocess.check_call( - [ - sys.executable, - "-mgrpc_tools.protoc", - f"--proto_path={api_proto_dir}", - f"--proto_path={core_proto_dir}", - f"--python_out={codegen_dir}", - f"--grpc_python_out={codegen_dir}", - *map(str, proto_paths), - ] - ) - fix_generated_output(codegen_dir) + with tempfile.TemporaryDirectory(dir=base_dir) as temp_dir: + temp_dir = Path(temp_dir) + subprocess.check_call( + [ + sys.executable, + "-mgrpc_tools.protoc", + f"--proto_path={api_proto_dir}", + f"--proto_path={core_proto_dir}", + f"--python_out={temp_dir}", + f"--grpc_python_out={temp_dir}", + *map(str, proto_paths), + ] + ) + # Apply import fixes before moving code + fix_generated_output(temp_dir) + # Move protos + for p in (temp_dir / "temporal" / "api").iterdir(): + shutil.rmtree(api_out_dir / p.name, ignore_errors=True) + p.replace(api_out_dir / p.name) + shutil.rmtree(api_out_dir / "dependencies", ignore_errors=True) + (temp_dir / "dependencies").replace(api_out_dir / "dependencies") + for p in (temp_dir / "temporal" / "sdk" / "core").iterdir(): + shutil.rmtree(sdk_out_dir / p.name, ignore_errors=True) + p.replace(sdk_out_dir / p.name) print("Done", file=sys.stderr) diff --git a/temporalio/proto/__init__.py b/temporalio/api/__init__.py similarity index 100% rename from temporalio/proto/__init__.py rename to temporalio/api/__init__.py diff --git a/temporalio/bridge/__init__.py b/temporalio/bridge/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/temporalio/bridge/proto/__init__.py 
b/temporalio/bridge/proto/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/api/__init__.py b/tests/api/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/api/test_grpc_stub.py b/tests/api/test_grpc_stub.py new file mode 100644 index 000000000..f8a3f01f9 --- /dev/null +++ b/tests/api/test_grpc_stub.py @@ -0,0 +1,54 @@ +import logging +from concurrent import futures + +import grpc + +import temporalio +import temporalio.api.workflowservice.v1.request_response_pb2 +import temporalio.api.workflowservice.v1.service_pb2_grpc + + +class SimpleServer( + temporalio.api.workflowservice.v1.service_pb2_grpc.WorkflowServiceServicer +): + async def CountWorkflowExecutions( + self, + request: temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsRequest, + context: grpc.aio.ServicerContext, + ) -> temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsResponse: + logging.info("Server RPC called") + assert request.namespace == "my namespace" + assert request.query == "my query" + return temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsResponse( + count=123 + ) + + +async def test_python_grpc_stub(): + """Make sure pure Python gRPC client works.""" + + # Start server + server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) + server = grpc.aio.server() + temporalio.api.workflowservice.v1.service_pb2_grpc.add_WorkflowServiceServicer_to_server( + SimpleServer(), server + ) + listen_addr = "[::]:50051" + server.add_insecure_port(listen_addr) + + logging.info("Starting server on %s", listen_addr) + await server.start() + + async with grpc.aio.insecure_channel("localhost:50051") as channel: + stub = temporalio.api.workflowservice.v1.service_pb2_grpc.WorkflowServiceStub( + channel + ) + response = await stub.CountWorkflowExecutions( + 
temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsRequest( + namespace="my namespace", query="my query" + ) + ) + assert response.count == 123 + + logging.info("Stopping server") + await server.stop(grace=None) From ca18e1bf361cca5ea3c21209b047db16ecd42260 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Thu, 27 Jan 2022 17:14:47 -0600 Subject: [PATCH 02/15] Support moving protos to top-level of module --- .gitignore | 1 + pyproject.toml | 8 ++------ scripts/gen-protos.py | 19 +++++++++++++++++-- temporalio/bridge/proto/__init__.py | 1 + tests/api/test_grpc_stub.py | 21 ++++++++------------- 5 files changed, 29 insertions(+), 21 deletions(-) diff --git a/.gitignore b/.gitignore index a744c40b7..69e1e7aee 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ temporalio/api/* !temporalio/api/__init__.py temporalio/bridge/proto/* !temporalio/bridge/proto/__init__.py +__pycache__ diff --git a/pyproject.toml b/pyproject.toml index 29131f916..19e76a013 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,6 @@ documentation = "https://docs.temporal.io/docs/python" [tool.poetry.dependencies] grpcio = "^1.43.0" -mypy = "^0.931" python = "^3.7" types-protobuf = "^3.19.6" @@ -29,11 +28,8 @@ pytest-asyncio = "^0.17.2" build = ["gen-protos", "test"] format = [{cmd = "black ."}, {cmd = "isort ."}] gen-protos = "python scripts/gen-protos.py" -lint = "black --check ." - -[tool.poe.tasks.test] -cmd = "pytest" -env = { PYTHONDONTWRITEBYTECODE = "1" } +lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}] +test = "pytest" [tool.pytest.ini_options] asyncio_mode = "auto" diff --git a/scripts/gen-protos.py b/scripts/gen-protos.py index fbc4d0565..5d2cfa50b 100644 --- a/scripts/gen-protos.py +++ b/scripts/gen-protos.py @@ -1,4 +1,5 @@ #!/usr/bin/env python3 +import collections import os import re import shutil @@ -27,16 +28,21 @@ re.compile(r"from temporal\.sdk\.core\.").sub, r"from temporalio.bridge.proto." 
) +find_message_re = re.compile(r"_sym_db\.RegisterMessage\(([^\)\.]+)\)") +find_enum_re = re.compile(r"DESCRIPTOR\.enum_types_by_name\['([^']+)'\] =") +find_class_re = re.compile(r"\nclass ([^\(\:]+)") +find_def_re = re.compile(r"\ndef ([^\(\:]+)") + def fix_generated_output(base_path: Path): """Fix the generated protoc output - - protoc doesn't generate __init__.py files + - protoc doesn't generate __init__.py files nor re-export the types we want - protoc doesn't generate the correct import paths (https://github.com/protocolbuffers/protobuf/issues/1491) """ - (base_path / "__init__.py").touch() + imports = collections.defaultdict(list) for p in base_path.iterdir(): if p.is_dir(): fix_generated_output(p) @@ -46,8 +52,17 @@ def fix_generated_output(base_path: Path): content = fix_api_import(content) content = fix_dependency_import(content) content = fix_sdk_import(content) + imports[p.stem] += find_message_re.findall(content) + imports[p.stem] += find_enum_re.findall(content) + imports[p.stem] += find_class_re.findall(content) + imports[p.stem] += find_def_re.findall(content) with p.open("w") as f: f.write(content) + # Write init + with (base_path / "__init__.py").open("w") as f: + for stem, messages in imports.items(): + for message in messages: + f.write(f"from .{stem} import {message}\n") if __name__ == "__main__": diff --git a/temporalio/bridge/proto/__init__.py b/temporalio/bridge/proto/__init__.py index e69de29bb..35e8182e4 100644 --- a/temporalio/bridge/proto/__init__.py +++ b/temporalio/bridge/proto/__init__.py @@ -0,0 +1 @@ +from .core_interface_pb2 import ActivityHeartbeat, ActivityTaskCompletion diff --git a/tests/api/test_grpc_stub.py b/tests/api/test_grpc_stub.py index f8a3f01f9..1c68a4089 100644 --- a/tests/api/test_grpc_stub.py +++ b/tests/api/test_grpc_stub.py @@ -4,22 +4,19 @@ import grpc import temporalio -import temporalio.api.workflowservice.v1.request_response_pb2 -import temporalio.api.workflowservice.v1.service_pb2_grpc +import 
temporalio.api.workflowservice.v1 -class SimpleServer( - temporalio.api.workflowservice.v1.service_pb2_grpc.WorkflowServiceServicer -): +class SimpleServer(temporalio.api.workflowservice.v1.WorkflowServiceServicer): async def CountWorkflowExecutions( self, - request: temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsRequest, + request: temporalio.api.workflowservice.v1.CountWorkflowExecutionsRequest, context: grpc.aio.ServicerContext, - ) -> temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsResponse: + ) -> temporalio.api.workflowservice.v1.CountWorkflowExecutionsResponse: logging.info("Server RPC called") assert request.namespace == "my namespace" assert request.query == "my query" - return temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsResponse( + return temporalio.api.workflowservice.v1.CountWorkflowExecutionsResponse( count=123 ) @@ -30,7 +27,7 @@ async def test_python_grpc_stub(): # Start server server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) server = grpc.aio.server() - temporalio.api.workflowservice.v1.service_pb2_grpc.add_WorkflowServiceServicer_to_server( + temporalio.api.workflowservice.v1.add_WorkflowServiceServicer_to_server( SimpleServer(), server ) listen_addr = "[::]:50051" @@ -40,11 +37,9 @@ async def test_python_grpc_stub(): await server.start() async with grpc.aio.insecure_channel("localhost:50051") as channel: - stub = temporalio.api.workflowservice.v1.service_pb2_grpc.WorkflowServiceStub( - channel - ) + stub = temporalio.api.workflowservice.v1.WorkflowServiceStub(channel) response = await stub.CountWorkflowExecutions( - temporalio.api.workflowservice.v1.request_response_pb2.CountWorkflowExecutionsRequest( + temporalio.api.workflowservice.v1.CountWorkflowExecutionsRequest( namespace="my namespace", query="my query" ) ) From 2c1b826d3f83e6f6fdc25ee270cf1d41640803b8 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Fri, 28 Jan 2022 11:38:20 -0600 
Subject: [PATCH 03/15] Add mypy check --- poetry.lock | 26 +++++++++++++++++++++----- pyproject.toml | 13 ++++++++++++- scripts/gen-protos.py | 19 ++++++++++++------- 3 files changed, 45 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index d68aff957..3f9179203 100644 --- a/poetry.lock +++ b/poetry.lock @@ -136,7 +136,7 @@ plugins = ["setuptools"] name = "mypy" version = "0.931" description = "Optional static typing for Python" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -154,10 +154,22 @@ python2 = ["typed-ast (>=1.4.0,<2)"] name = "mypy-extensions" version = "0.4.3" description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" +category = "dev" optional = false python-versions = "*" +[[package]] +name = "mypy-protobuf" +version = "3.2.0" +description = "Generate mypy stub files from protobuf specs" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +protobuf = ">=3.19.3" +types-protobuf = ">=3.19.5" + [[package]] name = "packaging" version = "21.3" @@ -288,7 +300,7 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" name = "tomli" version = "1.2.3" description = "A lil' TOML parser" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -296,7 +308,7 @@ python-versions = ">=3.6" name = "typed-ast" version = "1.5.2" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -342,7 +354,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "e665197c5402c0a436481eaa9bd351b44a60652c2fc15003fd4c0d380c0c47e8" +content-hash = "2c3b7797e9c425dff9cb79f175ff7d4f1a1e2097f9c7a6e6728cd26f4e6bf4e0" [metadata.files] atomicwrites = [ @@ -495,6 +507,10 @@ mypy-extensions = [ {file = 
"mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] +mypy-protobuf = [ + {file = "mypy-protobuf-3.2.0.tar.gz", hash = "sha256:730aa15337c38f0446fbe08f6c6c2370ee01d395125369d4b70e08b1e2ee30ee"}, + {file = "mypy_protobuf-3.2.0-py3-none-any.whl", hash = "sha256:65fc0492165f4a3c0aff69b03e34096fc1453e4dac8f14b4e9c2306cdde06010"}, +] packaging = [ {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, diff --git a/pyproject.toml b/pyproject.toml index 19e76a013..122543b43 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,12 +15,14 @@ documentation = "https://docs.temporal.io/docs/python" grpcio = "^1.43.0" python = "^3.7" types-protobuf = "^3.19.6" +typing-extensions = "^4.0.1" [tool.poetry.dev-dependencies] black = "^21.12b0" grpcio-tools = "^1.43.0" isort = "^5.10.1" mypy = "^0.931" +mypy-protobuf = "^3.2.0" pytest = "^6.2.5" pytest-asyncio = "^0.17.2" @@ -28,7 +30,8 @@ pytest-asyncio = "^0.17.2" build = ["gen-protos", "test"] format = [{cmd = "black ."}, {cmd = "isort ."}] gen-protos = "python scripts/gen-protos.py" -lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}] +lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}, "lint-types"] +lint-types = "mypy ." 
test = "pytest" [tool.pytest.ini_options] @@ -38,6 +41,14 @@ asyncio_mode = "auto" profile = "black" skip_gitignore = true +[tool.mypy] +ignore_missing_imports = true +exclude = [ + # Ignore generated code + 'temporalio/api', + 'temporalio/bridge/proto', +] + [build-system] build-backend = "poetry.core.masonry.api" requires = ["poetry-core>=1.0.0"] diff --git a/scripts/gen-protos.py b/scripts/gen-protos.py index 5d2cfa50b..4a6129961 100644 --- a/scripts/gen-protos.py +++ b/scripts/gen-protos.py @@ -8,6 +8,7 @@ import tempfile from functools import partial from pathlib import Path +from typing import Mapping base_dir = Path(__file__).parent.parent proto_dir = base_dir / "temporalio" / "bridge" / "sdk-core" / "protos" @@ -42,7 +43,7 @@ def fix_generated_output(base_path: Path): (https://github.com/protocolbuffers/protobuf/issues/1491) """ - imports = collections.defaultdict(list) + imports: Mapping[str, list[str]] = collections.defaultdict(list) for p in base_path.iterdir(): if p.is_dir(): fix_generated_output(p) @@ -52,10 +53,12 @@ def fix_generated_output(base_path: Path): content = fix_api_import(content) content = fix_dependency_import(content) content = fix_sdk_import(content) - imports[p.stem] += find_message_re.findall(content) - imports[p.stem] += find_enum_re.findall(content) - imports[p.stem] += find_class_re.findall(content) - imports[p.stem] += find_def_re.findall(content) + # Only use .py files to determine imports, not pyi ones + if p.suffix == ".py": + imports[p.stem] += find_message_re.findall(content) + imports[p.stem] += find_enum_re.findall(content) + imports[p.stem] += find_class_re.findall(content) + imports[p.stem] += find_def_re.findall(content) with p.open("w") as f: f.write(content) # Write init @@ -67,8 +70,8 @@ def fix_generated_output(base_path: Path): if __name__ == "__main__": print("Generating protos...", file=sys.stderr) - with tempfile.TemporaryDirectory(dir=base_dir) as temp_dir: - temp_dir = Path(temp_dir) + with 
tempfile.TemporaryDirectory(dir=base_dir) as temp_dir_raw: + temp_dir = Path(temp_dir_raw) subprocess.check_call( [ sys.executable, @@ -77,6 +80,8 @@ def fix_generated_output(base_path: Path): f"--proto_path={core_proto_dir}", f"--python_out={temp_dir}", f"--grpc_python_out={temp_dir}", + f"--mypy_out={temp_dir}", + f"--mypy_grpc_out={temp_dir}", *map(str, proto_paths), ] ) From 1c0f2c46291b51fce003146d37920aeb4368594e Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Fri, 28 Jan 2022 13:48:26 -0600 Subject: [PATCH 04/15] Fix test stub --- tests/api/test_grpc_stub.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/api/test_grpc_stub.py b/tests/api/test_grpc_stub.py index 1c68a4089..ad9025110 100644 --- a/tests/api/test_grpc_stub.py +++ b/tests/api/test_grpc_stub.py @@ -25,18 +25,15 @@ async def test_python_grpc_stub(): """Make sure pure Python gRPC client works.""" # Start server - server = grpc.server(futures.ThreadPoolExecutor(max_workers=10)) server = grpc.aio.server() temporalio.api.workflowservice.v1.add_WorkflowServiceServicer_to_server( SimpleServer(), server ) - listen_addr = "[::]:50051" - server.add_insecure_port(listen_addr) - - logging.info("Starting server on %s", listen_addr) + port = server.add_insecure_port("[::]:0") + logging.info("Starting server on %s", port) await server.start() - async with grpc.aio.insecure_channel("localhost:50051") as channel: + async with grpc.aio.insecure_channel(f"localhost:{port}") as channel: stub = temporalio.api.workflowservice.v1.WorkflowServiceStub(channel) response = await stub.CountWorkflowExecutions( temporalio.api.workflowservice.v1.CountWorkflowExecutionsRequest( From d04150b30f18bf0d392fc8061f0325fce0fe94e7 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Fri, 28 Jan 2022 16:26:41 -0600 Subject: [PATCH 05/15] Initial work on client and converter --- temporalio/__init__.py | 5 + temporalio/bridge/sdk-bridge/Cargo.toml | 18 ++++ temporalio/client/__init__.py | 4 + 
temporalio/client/client.py | 131 ++++++++++++++++++++++++ temporalio/client/workflow_execution.py | 2 + temporalio/client/workflow_handle.py | 39 +++++++ temporalio/client/workflow_service.py | 14 +++ temporalio/common/__init__.py | 1 + temporalio/common/retry_policy.py | 13 +++ temporalio/converter/__init__.py | 20 ++++ temporalio/converter/converter.py | 81 +++++++++++++++ temporalio/converter/plain.py | 78 ++++++++++++++ temporalio/converter/proto.py | 56 ++++++++++ 13 files changed, 462 insertions(+) create mode 100644 temporalio/bridge/sdk-bridge/Cargo.toml create mode 100644 temporalio/client/__init__.py create mode 100644 temporalio/client/client.py create mode 100644 temporalio/client/workflow_execution.py create mode 100644 temporalio/client/workflow_handle.py create mode 100644 temporalio/client/workflow_service.py create mode 100644 temporalio/common/__init__.py create mode 100644 temporalio/common/retry_policy.py create mode 100644 temporalio/converter/__init__.py create mode 100644 temporalio/converter/converter.py create mode 100644 temporalio/converter/plain.py create mode 100644 temporalio/converter/proto.py diff --git a/temporalio/__init__.py b/temporalio/__init__.py index e69de29bb..f957d5aea 100644 --- a/temporalio/__init__.py +++ b/temporalio/__init__.py @@ -0,0 +1,5 @@ +import typing + +from .common import RetryPolicy + +Convertible = typing.Any diff --git a/temporalio/bridge/sdk-bridge/Cargo.toml b/temporalio/bridge/sdk-bridge/Cargo.toml new file mode 100644 index 000000000..153a20074 --- /dev/null +++ b/temporalio/bridge/sdk-bridge/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "temporal-sdk-core-bridge" +version = "0.1.0" +edition = "2021" + +[lib] +name = "temporal_sdk_core_bridge" +crate-type = ["cdylib"] + +[dependencies] +tokio = "1.15" +prost = "0.9" +prost-types = "0.9" +temporal-sdk-core = { version = "0.1.0", path = "../sdk-core/core" } +temporal-sdk-core-api = { version = "0.1.0", path = "../sdk-core/core-api" } 
+temporal-sdk-core-protos = { version = "0.1.0", path = "../sdk-core/sdk-core-protos" } +pyo3 = { version = "0.15", features = ["extension-module"] } +pyo3-asyncio = { version = "0.15", features = ["tokio-runtime"] } \ No newline at end of file diff --git a/temporalio/client/__init__.py b/temporalio/client/__init__.py new file mode 100644 index 000000000..67e03f8c2 --- /dev/null +++ b/temporalio/client/__init__.py @@ -0,0 +1,4 @@ +from .client import Client, TLSConfig, WorkflowIDReusePolicy +from .workflow_execution import WorkflowExecution +from .workflow_handle import WorkflowHandle +from .workflow_service import WorkflowService diff --git a/temporalio/client/client.py b/temporalio/client/client.py new file mode 100644 index 000000000..b89f67dc9 --- /dev/null +++ b/temporalio/client/client.py @@ -0,0 +1,131 @@ +from dataclasses import dataclass +from datetime import timedelta +from enum import Enum +from typing import Any, Awaitable, Mapping, Optional, Union + +import temporalio +import temporalio.client +import temporalio.converter + + +@dataclass +class TLSConfig: + server_root_ca_cert: Optional[Union[str, bytes]] = None + domain: Optional[str] = None + client_cert: Optional[Union[str, bytes]] = None + client_private_key: Optional[Union[str, bytes]] = None + + +class WorkflowIDReusePolicy(Enum): + ALLOW_DUPLICATE = 1 + ALLOW_DUPLICATE_FAILED_ONLY = 2 + REJECT_DUPLICATE = 3 + + +class Client: + @staticmethod + async def connect( + addr: str, + *, + namespace: str = "default", + payload_converter: temporalio.converter.PayloadConverter = temporalio.converter.default(), + headers: Mapping[str, str] = {}, + identity: Optional[str] = None, + tls_config: Optional[TLSConfig] = None, + retry_policy: Optional[temporalio.RetryPolicy] = None + ) -> "Client": + client = Client( + addr, + namespace=namespace, + payload_converter=payload_converter, + headers=headers, + identity=identity, + tls_config=tls_config, + retry_policy=retry_policy, + ) + await client.ready() + return 
client
+
+    service: Awaitable[temporalio.client.WorkflowService]
+
+    def __init__(
+        self,
+        addr: str,
+        *,
+        namespace: str = "default",
+        payload_converter: temporalio.converter.PayloadConverter = temporalio.converter.default(),
+        headers: Mapping[str, str] = {},
+        identity: Optional[str] = None,
+        tls_config: Optional[TLSConfig] = None,
+        retry_policy: Optional[temporalio.RetryPolicy] = None
+    ) -> None:
+        raise NotImplementedError
+
+    async def __aenter__(self) -> "Client":
+        await self.ready()
+        return self
+
+    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
+        await self.close()
+
+    async def ready(self) -> None:
+        raise NotImplementedError
+
+    async def close(self) -> None:
+        raise NotImplementedError
+
+    async def start_workflow(
+        self,
+        workflow: str,
+        *args: Any,
+        task_queue: str,
+        id: Optional[str] = None,
+        execution_timeout: Optional[timedelta] = None,
+        run_timeout: Optional[timedelta] = None,
+        task_timeout: Optional[timedelta] = None,
+        id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE,
+        retry_policy: Optional[temporalio.RetryPolicy] = None,
+        cron_schedule: Optional[str] = None,
+        memo: Mapping[str, temporalio.Convertible] = {},
+        search_attributes: Mapping[str, temporalio.Convertible] = {},
+        header: Mapping[str, temporalio.Convertible] = {}
+    ) -> temporalio.client.WorkflowHandle[Optional[temporalio.Convertible]]:
+        raise NotImplementedError
+
+    async def execute_workflow(
+        self,
+        workflow: str,
+        *args: Any,
+        task_queue: str,
+        id: Optional[str] = None,
+        execution_timeout: Optional[timedelta] = None,
+        run_timeout: Optional[timedelta] = None,
+        task_timeout: Optional[timedelta] = None,
+        id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE,
+        retry_policy: Optional[temporalio.RetryPolicy] = None,
+        cron_schedule: Optional[str] = None,
+        memo: Mapping[str, temporalio.Convertible] = {},
+        search_attributes: Mapping[str, temporalio.Convertible] = {},
+        header: Mapping[str, temporalio.Convertible]
= {} + ) -> Optional[temporalio.Convertible]: + handle = await self.start_workflow( + workflow, + *args, + task_queue=task_queue, + id=id, + execution_timeout=execution_timeout, + run_timeout=run_timeout, + task_timeout=task_timeout, + id_reuse_policy=id_reuse_policy, + retry_policy=retry_policy, + cron_schedule=cron_schedule, + memo=memo, + search_attributes=search_attributes, + header=header + ) + return await handle.result() + + def get_workflow_handle( + self, id: str, run_id: Optional[str] = None + ) -> temporalio.client.WorkflowHandle[Optional[temporalio.Convertible]]: + return temporalio.client.WorkflowHandle(self, id, run_id) diff --git a/temporalio/client/workflow_execution.py b/temporalio/client/workflow_execution.py new file mode 100644 index 000000000..b70753aae --- /dev/null +++ b/temporalio/client/workflow_execution.py @@ -0,0 +1,2 @@ +class WorkflowExecution: + pass diff --git a/temporalio/client/workflow_handle.py b/temporalio/client/workflow_handle.py new file mode 100644 index 000000000..a377fbdd4 --- /dev/null +++ b/temporalio/client/workflow_handle.py @@ -0,0 +1,39 @@ +from typing import Generic, Optional, TypeVar + +import temporalio +import temporalio.client + +T = TypeVar("T") + + +class WorkflowHandle(Generic[T]): + _client: temporalio.client.Client + id: str + run_id: Optional[str] = None + + def __init__( + self, client: temporalio.client.Client, id: str, run_id: Optional[str] = None + ) -> None: + self._client = client + self.id = id + self.run_id = run_id + + async def result(self) -> T: + raise NotImplementedError + + async def cancel(self) -> None: + raise NotImplementedError + + async def describe(self) -> temporalio.client.WorkflowExecution: + raise NotImplementedError + + async def query( + self, name: str, *args: temporalio.Convertible + ) -> temporalio.Convertible: + raise NotImplementedError + + async def signal(self, name: str, *args: temporalio.Convertible) -> None: + raise NotImplementedError + + async def terminate(self, *, 
reason: Optional[str] = None) -> None: + raise NotImplementedError diff --git a/temporalio/client/workflow_service.py b/temporalio/client/workflow_service.py new file mode 100644 index 000000000..b62e87158 --- /dev/null +++ b/temporalio/client/workflow_service.py @@ -0,0 +1,14 @@ +from temporalio.api.workflowservice.v1 import ( + StartWorkflowExecutionRequest, + StartWorkflowExecutionResponse, +) + + +class WorkflowService: + def __init__(self): + raise NotImplementedError + + async def start_workflow_execution( + self, request: StartWorkflowExecutionRequest + ) -> StartWorkflowExecutionResponse: + raise NotImplementedError diff --git a/temporalio/common/__init__.py b/temporalio/common/__init__.py new file mode 100644 index 000000000..34150a0e5 --- /dev/null +++ b/temporalio/common/__init__.py @@ -0,0 +1 @@ +from .retry_policy import RetryPolicy diff --git a/temporalio/common/retry_policy.py b/temporalio/common/retry_policy.py new file mode 100644 index 000000000..c22659cdb --- /dev/null +++ b/temporalio/common/retry_policy.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass +from datetime import timedelta +from typing import Optional + + +@dataclass +class RetryPolicy: + initial_interval: timedelta + randomization_factor: float + multiplier: float + max_interval: timedelta + max_elapsed_time: Optional[timedelta] + max_retries: int diff --git a/temporalio/converter/__init__.py b/temporalio/converter/__init__.py new file mode 100644 index 000000000..161cdd837 --- /dev/null +++ b/temporalio/converter/__init__.py @@ -0,0 +1,20 @@ +from .converter import CompositePayloadConverter, PayloadConverter +from .plain import ( + BinaryNullPayloadConverter, + BinaryPlainPayloadConverter, + JSONPlainPayloadConverter, +) +from .proto import BinaryProtoPayloadConverter, JSONProtoPayloadConverter + + +# TODO(cretz): Should this be a var that can be changed instead? If so, can it +# be replaced _after_ client creation? 
We'd just have to fallback to this +# default at conversion time instead of instantiation time. +def default() -> PayloadConverter: + return CompositePayloadConverter( + BinaryNullPayloadConverter(), + BinaryPlainPayloadConverter(), + JSONProtoPayloadConverter(), + BinaryProtoPayloadConverter(), + JSONPlainPayloadConverter(), + ) diff --git a/temporalio/converter/converter.py b/temporalio/converter/converter.py new file mode 100644 index 000000000..02a59b768 --- /dev/null +++ b/temporalio/converter/converter.py @@ -0,0 +1,81 @@ +from abc import ABC, abstractmethod +from typing import Any, Optional, Tuple + +import temporalio.api.common.v1 + + +class PayloadConverter(ABC): + @abstractmethod + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + return None + + @abstractmethod + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + return (None, False) + + async def encode_multiple( + self, values: list[Any] + ) -> Optional[temporalio.api.common.v1.Payloads]: + payloads = [] + for value in values: + payload = await self.encode(value) + # Return if any payloads cannot be converted + if payload is None: + return None + payloads.append(payload) + return temporalio.api.common.v1.Payloads(payloads=payloads) + + async def decode_multiple( + self, payloads: temporalio.api.common.v1.Payloads + ) -> Tuple[list[Any], bool]: + values = [] + for payload in payloads.payloads: + value, ok = await self.decode(payload) + # Return if any values cannot be converted + if not ok: + return ([], False) + values.append(value) + return (values, True) + + +class CompositePayloadConverter(PayloadConverter): + _converters: list[PayloadConverter] + + def __init__(self, *converters: PayloadConverter) -> None: + self._converters = list(converters) + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + for converter in self._converters: + payload = await converter.encode(value) + if payload is 
not None: + return payload + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + for converter in self._converters: + value, ok = await converter.decode(payload) + if ok: + return (value, True) + return (None, False) + + async def encode_multiple( + self, values: list[Any] + ) -> Optional[temporalio.api.common.v1.Payloads]: + for converter in self._converters: + payloads = await converter.encode_multiple(values) + if payloads is not None: + return payloads + return None + + async def decode_multiple( + self, payloads: temporalio.api.common.v1.Payloads + ) -> Tuple[list[Any], bool]: + for converter in self._converters: + values, ok = await converter.decode_multiple(payloads) + if ok: + return (values, True) + return ([], False) diff --git a/temporalio/converter/plain.py b/temporalio/converter/plain.py new file mode 100644 index 000000000..f748c7660 --- /dev/null +++ b/temporalio/converter/plain.py @@ -0,0 +1,78 @@ +import dataclasses +import json +from dataclasses import dataclass +from typing import Any, Optional, Tuple, Type + +import temporalio.api.common.v1 +import temporalio.converter + + +class BinaryNullPayloadConverter(temporalio.converter.PayloadConverter): + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + if value is None: + return temporalio.api.common.v1.Payload( + metadata={"encoding": b"binary/null"} + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + return (None, payload.metadata["encoding"] == b"binary/null") + + +class BinaryPlainPayloadConverter(temporalio.converter.PayloadConverter): + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + if isinstance(value, bytes): + return temporalio.api.common.v1.Payload( + metadata={"encoding": b"binary/plain"}, data=value + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> 
Tuple[Any, bool]: + return (payload.data, payload.metadata["encoding"] == b"binary/plain") + + +class JSONPlainPayloadConverter(temporalio.converter.PayloadConverter): + _encoder: Optional[Type[json.JSONEncoder]] + _decoder: Optional[Type[json.JSONDecoder]] + _dataclass_asdict: bool + _encoding: bytes + + # TODO(cretz): Document that it can be customized/reused, but the encoding should be changed + def __init__( + self, + *, + encoder: Optional[Type[json.JSONEncoder]] = None, + decoder: Optional[Type[json.JSONDecoder]] = None, + dataclass_asdict: bool = True, + encoding: str = "json/plain" + ) -> None: + super().__init__() + self._encoder = encoder + self._decoder = decoder + self._dataclass_asdict = dataclass_asdict + self._encoding = encoding.encode() + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + if self._dataclass_asdict and dataclasses.is_dataclass(value): + value = dataclasses.asdict(value) + # We swallow JSON encode error and just return None + try: + return temporalio.api.common.v1.Payload( + metadata={"encoding": self._encoding}, + data=json.dumps(value, cls=self._encoder).encode(), + ) + except (RuntimeError, TypeError, ValueError): + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + if payload.metadata["encoding"] == self._encoding: + # We do not swallow JSON decode errors since we expect success due + # to already-matched encoding + return (json.loads(payload.data, cls=self._decoder), True) + return (None, False) diff --git a/temporalio/converter/proto.py b/temporalio/converter/proto.py new file mode 100644 index 000000000..d595e3a63 --- /dev/null +++ b/temporalio/converter/proto.py @@ -0,0 +1,56 @@ +from typing import Any, Optional, Tuple + +import google.protobuf.json_format +import google.protobuf.message +import google.protobuf.symbol_database + +import temporalio.api.common.v1 +import temporalio.converter + +_sym_db = 
google.protobuf.symbol_database.Default() + + +class JSONProtoPayloadConverter(temporalio.converter.PayloadConverter): + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + if issubclass(value, google.protobuf.message.Message): + return temporalio.api.common.v1.Payload( + metadata={ + "encoding": b"json/protobuf", + "messageType": value.DESCRIPTOR.full_name, + }, + data=google.protobuf.json_format.MessageToJson(value).encode(), + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + if payload.metadata["encoding"] == b"json/protobuf": + # This raises error if not found + value = _sym_db.GetSymbol(str(payload.metadata["messageType"]))() + google.protobuf.json_format.Parse(payload.data, value) + return (value, True) + return (None, False) + + +class BinaryProtoPayloadConverter(temporalio.converter.PayloadConverter): + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + if issubclass(value, google.protobuf.message.Message): + return temporalio.api.common.v1.Payload( + metadata={ + "encoding": b"binary/protobuf", + "messageType": value.DESCRIPTOR.full_name, + }, + data=value.SerializeToString(), + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + if payload.metadata["encoding"] == b"binary/protobuf": + # This raises error if not found + value = _sym_db.GetSymbol(str(payload.metadata["messageType"]))() + value.ParseFromString(payload.data) + return (value, True) + return (None, False) From ccbdb013157fcd17a8c2e63cd7fab4ec092bef21 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Mon, 31 Jan 2022 08:28:53 -0600 Subject: [PATCH 06/15] trigger GitHub actions From da4491003b4836b86b13cfca5fb3e584a86dc9e3 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Mon, 31 Jan 2022 10:24:52 -0600 Subject: [PATCH 07/15] Converter docs and tests --- temporalio/converter.py | 345 
++++++++++++++++++++++++++++++ temporalio/converter/__init__.py | 20 -- temporalio/converter/converter.py | 81 ------- temporalio/converter/plain.py | 78 ------- temporalio/converter/proto.py | 56 ----- tests/api/test_grpc_stub.py | 1 - tests/converter_test.py | 97 +++++++++ 7 files changed, 442 insertions(+), 236 deletions(-) create mode 100644 temporalio/converter.py delete mode 100644 temporalio/converter/__init__.py delete mode 100644 temporalio/converter/converter.py delete mode 100644 temporalio/converter/plain.py delete mode 100644 temporalio/converter/proto.py create mode 100644 tests/converter_test.py diff --git a/temporalio/converter.py b/temporalio/converter.py new file mode 100644 index 000000000..2ee9bd434 --- /dev/null +++ b/temporalio/converter.py @@ -0,0 +1,345 @@ +"""Base converter and default implementations for conversion to/from values/payloads.""" + +import dataclasses +import json +from abc import ABC, abstractmethod +from typing import Any, Optional, Tuple, Type + +import google.protobuf.json_format +import google.protobuf.message +import google.protobuf.symbol_database + +import temporalio.api.common.v1 + + +class PayloadConverter(ABC): + """Base converter to/from values/payloads.""" + + @abstractmethod + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """Encode a single value to a payload or None. + + Args: + value: Value to be converted. + + Returns: + Payload of the value or None if unable to convert. + """ + return None + + @abstractmethod + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + """Decode a single payload to a Python value if able. + + Args: + payload: Payload to convert to Python value. + + Return: + A tuple with the first value as the Python value and the second as + whether it could be converted or not. If the payload can be + converted, it is the first value of the tuple and the second value + is True. 
If the payload cannot be converted, the first value is
+            undefined and the second value is False.
+        """
+        return (None, False)
+
+    async def encode_multiple(
+        self, values: list[Any]
+    ) -> Optional[temporalio.api.common.v1.Payloads]:
+        """Encode multiple values into payloads if able.
+
+        Values are expected to be of a common payload type/encoding. The default
+        implementation makes one payload for each value but subclasses may alter
+        that.
+
+        Args:
+            values: List of values to convert.
+        """
+        payloads = []
+        for value in values:
+            payload = await self.encode(value)
+            # Return if any payloads cannot be converted
+            if payload is None:
+                return None
+            payloads.append(payload)
+        return temporalio.api.common.v1.Payloads(payloads=payloads)
+
+    async def decode_multiple(
+        self, payloads: temporalio.api.common.v1.Payloads
+    ) -> Tuple[list[Any], bool]:
+        """Decode multiple payloads into Python values if able.
+
+        Payloads are expected to be of a common type/encoding. The default
+        implementation makes one value for each payload but subclasses may alter
+        that.
+
+        Args:
+            payloads: Payloads to convert to Python values.
+
+        Return:
+            A tuple with the first value as a collection of Python values and
+            the second as whether it could be converted or not. If the payloads
+            can be converted, they are the first values of the tuple and the
+            second value is True. If the payloads cannot be converted, the first
+            value is undefined and the second value is False.
+        """
+        values = []
+        for payload in payloads.payloads:
+            value, ok = await self.decode(payload)
+            # Return if any values cannot be converted
+            if not ok:
+                return ([], False)
+            values.append(value)
+        return (values, True)
+
+
+class CompositePayloadConverter(PayloadConverter):
+    """Composite converter that delegates to a list of converters.
+
+    Encoding/decoding are attempted on each converter successively until it
+    succeeds.
+
+    Attributes:
+        converters: List of converters to delegate to, in order.
+ """ + + converters: list[PayloadConverter] + + def __init__(self, *converters: PayloadConverter) -> None: + """Initializes the converter. + + Args: + converters: Converters to delegate to, in order. + """ + self.converters = list(converters) + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """Encode value trying each converter. See base class.""" + for converter in self.converters: + payload = await converter.encode(value) + if payload is not None: + return payload + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + """Decode payload trying each converter. See base class.""" + for converter in self.converters: + value, ok = await converter.decode(payload) + if ok: + return (value, True) + return (None, False) + + async def encode_multiple( + self, values: list[Any] + ) -> Optional[temporalio.api.common.v1.Payloads]: + """Encode multiple values trying each converter. See base class. + + Note, this attempts full encode_multiple calls on the delegated + converter. It does not allow different converters to encode different + values. A single converter must be able to convert them all at once. + """ + for converter in self.converters: + payloads = await converter.encode_multiple(values) + if payloads is not None: + return payloads + return None + + async def decode_multiple( + self, payloads: temporalio.api.common.v1.Payloads + ) -> Tuple[list[Any], bool]: + """Decode multiple payloads trying each converter. See base class. + + Note, this attempts full decode_multiple calls on the delegated + converter. It does not allow different converters to decode different + payloads. A single converter must be able to convert them all at once. 
+ """ + for converter in self.converters: + values, ok = await converter.decode_multiple(payloads) + if ok: + return (values, True) + return ([], False) + + +class BinaryNullPayloadConverter(PayloadConverter): + """Converter for binary/null payloads supporting None values.""" + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class.""" + if value is None: + return temporalio.api.common.v1.Payload( + metadata={"encoding": b"binary/null"} + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + return (None, payload.metadata["encoding"] == b"binary/null") + + +class BinaryPlainPayloadConverter(PayloadConverter): + """Converter for binary/plain payloads supporting bytes values.""" + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class.""" + if isinstance(value, bytes): + return temporalio.api.common.v1.Payload( + metadata={"encoding": b"binary/plain"}, data=value + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + """See base class.""" + return (payload.data, payload.metadata["encoding"] == b"binary/plain") + + +_sym_db = google.protobuf.symbol_database.Default() + + +class JSONProtoPayloadConverter(PayloadConverter): + """Converter for json/protobuf payloads supporting protobuf Message values.""" + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class.""" + if isinstance(value, google.protobuf.message.Message): + # We have to convert to dict then to JSON because MessageToJson does + # not have a compact option removing spaces and newlines + json_str = json.dumps( + google.protobuf.json_format.MessageToDict(value), + separators=(",", ":"), + sort_keys=True, + ) + return temporalio.api.common.v1.Payload( + metadata={ + "encoding": b"json/protobuf", + "messageType": 
value.DESCRIPTOR.full_name.encode(), + }, + data=json_str.encode(), + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + """See base class.""" + if payload.metadata["encoding"] == b"json/protobuf": + # This raises error if not found + value = _sym_db.GetSymbol(payload.metadata["messageType"].decode())() + google.protobuf.json_format.Parse(payload.data, value) + return (value, True) + return (None, False) + + +class BinaryProtoPayloadConverter(PayloadConverter): + """Converter for binary/protobuf payloads supporting protobuf Message values.""" + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class.""" + if isinstance(value, google.protobuf.message.Message): + return temporalio.api.common.v1.Payload( + metadata={ + "encoding": b"binary/protobuf", + "messageType": value.DESCRIPTOR.full_name.encode(), + }, + data=value.SerializeToString(), + ) + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + """See base class.""" + if payload.metadata["encoding"] == b"binary/protobuf": + # This raises error if not found + value = _sym_db.GetSymbol(payload.metadata["messageType"].decode())() + value.ParseFromString(payload.data) + return (value, True) + return (None, False) + + +class JSONPlainPayloadConverter(PayloadConverter): + """Converter for json/plain payloads supporting common Python values. + + This supports all values that :py:func:`json.dump` supports and also adds + encoding support for :py:mod:`dataclasses` by converting them using + :py:func:`dataclasses.asdict`. Note that on decode they come back as dict as + well and the caller must convert back to a data class. 
+ """ + + _encoder: Optional[Type[json.JSONEncoder]] + _decoder: Optional[Type[json.JSONDecoder]] + _dataclass_asdict: bool + _encoding: bytes + + def __init__( + self, + *, + encoder: Optional[Type[json.JSONEncoder]] = None, + decoder: Optional[Type[json.JSONDecoder]] = None, + dataclass_asdict: bool = True, + encoding: str = "json/plain" + ) -> None: + """Initialize a JSON data converter. + + Args: + encoder: Custom encoder class object to use. + decoder: Custom decoder class object to use. + dataclass_asdict: Whether to support data class encoding. + encoding: Encoding name to use. + """ + super().__init__() + self._encoder = encoder + self._decoder = decoder + self._dataclass_asdict = dataclass_asdict + self._encoding = encoding.encode() + + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: + """See base class.""" + if self._dataclass_asdict and dataclasses.is_dataclass(value): + value = dataclasses.asdict(value) + # We swallow JSON encode error and just return None + try: + return temporalio.api.common.v1.Payload( + metadata={"encoding": self._encoding}, + data=json.dumps( + value, cls=self._encoder, separators=(",", ":"), sort_keys=True + ).encode(), + ) + except (RuntimeError, TypeError, ValueError): + return None + + async def decode( + self, payload: temporalio.api.common.v1.Payload + ) -> Tuple[Any, bool]: + """See base class.""" + if payload.metadata["encoding"] == self._encoding: + # We do not swallow JSON decode errors since we expect success due + # to already-matched encoding + return (json.loads(payload.data, cls=self._decoder), True) + return (None, False) + + +# TODO(cretz): Should this be a var that can be changed instead? If so, can it +# be replaced _after_ client creation? We'd just have to fallback to this +# default at conversion time instead of instantiation time. +def default() -> CompositePayloadConverter: + """Default converter compatible with other Temporal SDKs. 
+ + This handles None, bytes, all protobuf message types, and any type that + :py:func:`json.dump` accepts. In addition, this supports encoding + :py:mod:`dataclasses` but not decoding them, so decoded data classes appear + as dicts which may need to be converted to data classes by users. + """ + return CompositePayloadConverter( + BinaryNullPayloadConverter(), + BinaryPlainPayloadConverter(), + JSONProtoPayloadConverter(), + BinaryProtoPayloadConverter(), + JSONPlainPayloadConverter(), + ) diff --git a/temporalio/converter/__init__.py b/temporalio/converter/__init__.py deleted file mode 100644 index 161cdd837..000000000 --- a/temporalio/converter/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -from .converter import CompositePayloadConverter, PayloadConverter -from .plain import ( - BinaryNullPayloadConverter, - BinaryPlainPayloadConverter, - JSONPlainPayloadConverter, -) -from .proto import BinaryProtoPayloadConverter, JSONProtoPayloadConverter - - -# TODO(cretz): Should this be a var that can be changed instead? If so, can it -# be replaced _after_ client creation? We'd just have to fallback to this -# default at conversion time instead of instantiation time. 
-def default() -> PayloadConverter: - return CompositePayloadConverter( - BinaryNullPayloadConverter(), - BinaryPlainPayloadConverter(), - JSONProtoPayloadConverter(), - BinaryProtoPayloadConverter(), - JSONPlainPayloadConverter(), - ) diff --git a/temporalio/converter/converter.py b/temporalio/converter/converter.py deleted file mode 100644 index 02a59b768..000000000 --- a/temporalio/converter/converter.py +++ /dev/null @@ -1,81 +0,0 @@ -from abc import ABC, abstractmethod -from typing import Any, Optional, Tuple - -import temporalio.api.common.v1 - - -class PayloadConverter(ABC): - @abstractmethod - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - return None - - @abstractmethod - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - return (None, False) - - async def encode_multiple( - self, values: list[Any] - ) -> Optional[temporalio.api.common.v1.Payloads]: - payloads = [] - for value in values: - payload = await self.encode(value) - # Return if any payloads cannot be converted - if payload is None: - return None - payloads.append(payload) - return temporalio.api.common.v1.Payloads(payloads=payloads) - - async def decode_multiple( - self, payloads: temporalio.api.common.v1.Payloads - ) -> Tuple[list[Any], bool]: - values = [] - for payload in payloads.payloads: - value, ok = await self.decode(payload) - # Return if any values cannot be converted - if not ok: - return ([], False) - values.append(value) - return (values, True) - - -class CompositePayloadConverter(PayloadConverter): - _converters: list[PayloadConverter] - - def __init__(self, *converters: PayloadConverter) -> None: - self._converters = list(converters) - - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - for converter in self._converters: - payload = await converter.encode(value) - if payload is not None: - return payload - return None - - async def decode( - self, payload: 
temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - for converter in self._converters: - value, ok = await converter.decode(payload) - if ok: - return (value, True) - return (None, False) - - async def encode_multiple( - self, values: list[Any] - ) -> Optional[temporalio.api.common.v1.Payloads]: - for converter in self._converters: - payloads = await converter.encode_multiple(values) - if payloads is not None: - return payloads - return None - - async def decode_multiple( - self, payloads: temporalio.api.common.v1.Payloads - ) -> Tuple[list[Any], bool]: - for converter in self._converters: - values, ok = await converter.decode_multiple(payloads) - if ok: - return (values, True) - return ([], False) diff --git a/temporalio/converter/plain.py b/temporalio/converter/plain.py deleted file mode 100644 index f748c7660..000000000 --- a/temporalio/converter/plain.py +++ /dev/null @@ -1,78 +0,0 @@ -import dataclasses -import json -from dataclasses import dataclass -from typing import Any, Optional, Tuple, Type - -import temporalio.api.common.v1 -import temporalio.converter - - -class BinaryNullPayloadConverter(temporalio.converter.PayloadConverter): - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - if value is None: - return temporalio.api.common.v1.Payload( - metadata={"encoding": b"binary/null"} - ) - return None - - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - return (None, payload.metadata["encoding"] == b"binary/null") - - -class BinaryPlainPayloadConverter(temporalio.converter.PayloadConverter): - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - if isinstance(value, bytes): - return temporalio.api.common.v1.Payload( - metadata={"encoding": b"binary/plain"}, data=value - ) - return None - - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - return (payload.data, payload.metadata["encoding"] == 
b"binary/plain") - - -class JSONPlainPayloadConverter(temporalio.converter.PayloadConverter): - _encoder: Optional[Type[json.JSONEncoder]] - _decoder: Optional[Type[json.JSONDecoder]] - _dataclass_asdict: bool - _encoding: bytes - - # TODO(cretz): Document that it can be customized/reused, but the encoding should be changed - def __init__( - self, - *, - encoder: Optional[Type[json.JSONEncoder]] = None, - decoder: Optional[Type[json.JSONDecoder]] = None, - dataclass_asdict: bool = True, - encoding: str = "json/plain" - ) -> None: - super().__init__() - self._encoder = encoder - self._decoder = decoder - self._dataclass_asdict = dataclass_asdict - self._encoding = encoding.encode() - - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - if self._dataclass_asdict and dataclasses.is_dataclass(value): - value = dataclasses.asdict(value) - # We swallow JSON encode error and just return None - try: - return temporalio.api.common.v1.Payload( - metadata={"encoding": self._encoding}, - data=json.dumps(value, cls=self._encoder).encode(), - ) - except (RuntimeError, TypeError, ValueError): - return None - - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - if payload.metadata["encoding"] == self._encoding: - # We do not swallow JSON decode errors since we expect success due - # to already-matched encoding - return (json.loads(payload.data, cls=self._decoder), True) - return (None, False) diff --git a/temporalio/converter/proto.py b/temporalio/converter/proto.py deleted file mode 100644 index d595e3a63..000000000 --- a/temporalio/converter/proto.py +++ /dev/null @@ -1,56 +0,0 @@ -from typing import Any, Optional, Tuple - -import google.protobuf.json_format -import google.protobuf.message -import google.protobuf.symbol_database - -import temporalio.api.common.v1 -import temporalio.converter - -_sym_db = google.protobuf.symbol_database.Default() - - -class 
JSONProtoPayloadConverter(temporalio.converter.PayloadConverter): - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - if issubclass(value, google.protobuf.message.Message): - return temporalio.api.common.v1.Payload( - metadata={ - "encoding": b"json/protobuf", - "messageType": value.DESCRIPTOR.full_name, - }, - data=google.protobuf.json_format.MessageToJson(value).encode(), - ) - return None - - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - if payload.metadata["encoding"] == b"json/protobuf": - # This raises error if not found - value = _sym_db.GetSymbol(str(payload.metadata["messageType"]))() - google.protobuf.json_format.Parse(payload.data, value) - return (value, True) - return (None, False) - - -class BinaryProtoPayloadConverter(temporalio.converter.PayloadConverter): - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - if issubclass(value, google.protobuf.message.Message): - return temporalio.api.common.v1.Payload( - metadata={ - "encoding": b"binary/protobuf", - "messageType": value.DESCRIPTOR.full_name, - }, - data=value.SerializeToString(), - ) - return None - - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - if payload.metadata["encoding"] == b"binary/protobuf": - # This raises error if not found - value = _sym_db.GetSymbol(str(payload.metadata["messageType"]))() - value.ParseFromString(payload.data) - return (value, True) - return (None, False) diff --git a/tests/api/test_grpc_stub.py b/tests/api/test_grpc_stub.py index ad9025110..767e88ca7 100644 --- a/tests/api/test_grpc_stub.py +++ b/tests/api/test_grpc_stub.py @@ -1,5 +1,4 @@ import logging -from concurrent import futures import grpc diff --git a/tests/converter_test.py b/tests/converter_test.py new file mode 100644 index 000000000..28f2500f9 --- /dev/null +++ b/tests/converter_test.py @@ -0,0 +1,97 @@ +from dataclasses import dataclass + 
+import temporalio.api.common.v1 +import temporalio.converter + + +async def test_default(): + async def assert_payload( + input, expected_encoding, expected_data, *, expected_decoded_input=None + ): + payload = await temporalio.converter.default().encode(input) + # Only check none if that's the expected encoding + if expected_encoding is None: + assert payload is None + return + # Check encoding and data + if isinstance(expected_encoding, str): + expected_encoding = expected_encoding.encode() + assert payload.metadata["encoding"] == expected_encoding + if isinstance(expected_data, str): + expected_data = expected_data.encode() + assert payload.data == expected_data + # Decode and check + actual_input, ok = await temporalio.converter.default().decode(payload) + assert ok + if expected_decoded_input is None: + expected_decoded_input = input + assert actual_input == expected_decoded_input + return payload + + await assert_payload(None, "binary/null", "") + await assert_payload(b"some binary", "binary/plain", "some binary") + payload = await assert_payload( + temporalio.api.common.v1.WorkflowExecution(workflow_id="id1", run_id="id2"), + "json/protobuf", + '{"runId":"id2","workflowId":"id1"}', + ) + assert ( + payload.metadata["messageType"] == b"temporal.api.common.v1.WorkflowExecution" + ) + await assert_payload( + {"foo": "bar", "baz": "qux"}, "json/plain", '{"baz":"qux","foo":"bar"}' + ) + await assert_payload("somestr", "json/plain", '"somestr"') + await assert_payload(1234, "json/plain", "1234") + await assert_payload(12.34, "json/plain", "12.34") + await assert_payload(True, "json/plain", "true") + await assert_payload(False, "json/plain", "false") + + class NonSerializableClass: + pass + + await assert_payload(NonSerializableClass(), None, None) + + @dataclass + class MyDataClass: + foo: str + bar: int + + await assert_payload( + MyDataClass(foo="somestr", bar=123), + "json/plain", + '{"bar":123,"foo":"somestr"}', + expected_decoded_input={"foo": "somestr", 
"bar": 123}, + ) + + +async def test_binary_proto(): + # We have to test this separately because by default it never encodes + # anything + conv = temporalio.converter.BinaryProtoPayloadConverter() + proto = temporalio.api.common.v1.WorkflowExecution(workflow_id="id1", run_id="id2") + payload = await conv.encode(proto) + assert payload.metadata["encoding"] == b"binary/protobuf" + assert ( + payload.metadata["messageType"] == b"temporal.api.common.v1.WorkflowExecution" + ) + assert payload.data == proto.SerializeToString() + decoded, ok = await conv.decode(payload) + assert ok + assert decoded == proto + + +async def test_multiple(): + payloads = await temporalio.converter.default().encode_multiple( + [{"foo": "bar"}, {"baz": "qux"}] + ) + assert len(payloads.payloads) == 2 + assert payloads.payloads[0].metadata["encoding"] == b"json/plain" + assert payloads.payloads[0].data == b'{"foo":"bar"}' + assert payloads.payloads[1].metadata["encoding"] == b"json/plain" + assert payloads.payloads[1].data == b'{"baz":"qux"}' + values, ok = await temporalio.converter.default().decode_multiple(payloads) + assert ok + assert len(values) == 2 + assert values[0] == {"foo": "bar"} + assert values[1] == {"baz": "qux"} From 44311564868d7487969d7092ff9f38b8cfa23428 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Mon, 31 Jan 2022 14:46:09 -0600 Subject: [PATCH 08/15] Rework converters to support payload converter and type hint --- temporalio/client/client.py | 6 +- temporalio/converter.py | 263 +++++++++++++++++++----------------- tests/converter_test.py | 68 +++++----- 3 files changed, 179 insertions(+), 158 deletions(-) diff --git a/temporalio/client/client.py b/temporalio/client/client.py index b89f67dc9..3575c39cb 100644 --- a/temporalio/client/client.py +++ b/temporalio/client/client.py @@ -28,7 +28,7 @@ async def connect( addr: str, *, namespace: str = "default", - payload_converter: temporalio.converter.PayloadConverter = temporalio.converter.default(), + data_converter: 
temporalio.converter.DataConverter = temporalio.converter.default(), headers: Mapping[str, str] = {}, identity: Optional[str] = None, tls_config: Optional[TLSConfig] = None, @@ -37,7 +37,7 @@ async def connect( client = Client( addr, namespace=namespace, - payload_converter=payload_converter, + data_converter=data_converter, headers=headers, identity=identity, tls_config=tls_config, @@ -53,7 +53,7 @@ def __init__( addr: str, *, namespace: str = "default", - payload_converter: temporalio.converter.PayloadConverter = temporalio.converter.default(), + data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), headers: Mapping[str, str] = {}, identity: Optional[str] = None, tls_config: Optional[TLSConfig] = None, diff --git a/temporalio/converter.py b/temporalio/converter.py index 2ee9bd434..151cd8770 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -1,6 +1,7 @@ """Base converter and default implementations for conversion to/from values/payloads.""" import dataclasses +import inspect import json from abc import ABC, abstractmethod from typing import Any, Optional, Tuple, Type @@ -12,8 +13,52 @@ import temporalio.api.common.v1 +class DataConverter(ABC): + """Base converter to/from multiple payloads/values.""" + + @abstractmethod + async def encode(self, values: list[Any]) -> list[temporalio.api.common.v1.Payload]: + """Encode values into payloads. + + Args: + values: Values to be converted. + + Returns: + Converted payloads. Note, this does not have to be the same number + as values given, but at least one must be present. + + Raises: + Exception: Any issue during conversion. + """ + raise NotImplementedError + + @abstractmethod + async def decode( + self, + payloads: list[temporalio.api.common.v1.Payload], + type_hints: Optional[list[Type]], + ) -> list[Any]: + """Decode payloads into values. + + Args: + payloads: Payloads to convert to Python values. + type_hints: Types that are expected if any. 
This may not have any + types if there are no annotations on the target. If this is + present, it must have the exact same length as payloads even if + the values are just "object". + + Return: + Collection of Python values. Note, this does not have to be the same + number as values given, but at least one must be present. + + Raises: + Exception: Any issue during conversion. + """ + raise NotImplementedError + + class PayloadConverter(ABC): - """Base converter to/from values/payloads.""" + """Base converter to/from single payload/value.""" @abstractmethod async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: @@ -29,12 +74,16 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] @abstractmethod async def decode( - self, payload: temporalio.api.common.v1.Payload + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, ) -> Tuple[Any, bool]: """Decode a single payload to a Python value if able. Args: payload: Payload to convert to Python value. + type_hints: Type that is expected if any. This may not have a type + if there are no annotations on the target. Return: A tuple with the first value as the Python value and the second as @@ -45,127 +94,80 @@ async def decode( """ return (None, False) - async def encode_multiple( - self, values: list[Any] - ) -> Optional[temporalio.api.common.v1.Payloads]: - """Encode multiple values into payloads if able. - - Values are expected to be of a common payload type/encoding. The default - implementation makes one payload for each value but subclasses may alter - that. - - Args: - values: List of values to convert. 
- """ - payloads = [] - for value in values: - payload = await self.encode(value) - # Return if any payloads cannot be converted - if payload is None: - return None - payloads.append(payload) - return temporalio.api.common.v1.Payloads(payloads=payloads) - - async def decode_multiple( - self, payloads: temporalio.api.common.v1.Payloads - ) -> Tuple[list[Any], bool]: - """Decode multiple payloads into Python values if able. - - Payloads are expected to be of a common type/encoding. The default - implementation makes value for each payload but subclasses may alter - that. - Args: - payloads: Payloads to convert to Python values. - - Return: - A tuple with the first value as a collection of Python values and - the second as whether it could be converted or not. If the payloads - can be converted, they are the first values of the tuple and the - second value is True. If the payloads cannot be converted, the first - value is undefined and the second value is False. - """ - values = [] - for payload in payloads.payloads: - value, ok = await self.decode(payload) - # Return if any values cannot be converted - if not ok: - return ([], False) - values.append(value) - return (values, True) +class CompositeDataConverter(DataConverter): + """Composite data converter that delegates to a list of payload converters. - -class CompositePayloadConverter(PayloadConverter): - """Composite converter that delegates to a list of converters. - - Encoding/decoding are attempted on each converter successively until it - succeeds. + Encoding/decoding are attempted on each payload converter successively until + it succeeds. Attributes: - converters: List of converters to delegate to, in order. + converters: List of payload converters to delegate to, in order. """ converters: list[PayloadConverter] def __init__(self, *converters: PayloadConverter) -> None: - """Initializes the converter. + """Initializes the data converter. Args: - converters: Converters to delegate to, in order. 
+ converters: Payload converters to delegate to, in order. """ self.converters = list(converters) - async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: - """Encode value trying each converter. See base class.""" - for converter in self.converters: - payload = await converter.encode(value) - if payload is not None: - return payload - return None - - async def decode( - self, payload: temporalio.api.common.v1.Payload - ) -> Tuple[Any, bool]: - """Decode payload trying each converter. See base class.""" - for converter in self.converters: - value, ok = await converter.decode(payload) - if ok: - return (value, True) - return (None, False) + async def encode(self, values: list[Any]) -> list[temporalio.api.common.v1.Payload]: + """Encode values trying each converter. - async def encode_multiple( - self, values: list[Any] - ) -> Optional[temporalio.api.common.v1.Payloads]: - """Encode multiple values trying each converter. See base class. - - Note, this attempts full encode_multiple calls on the delegated - converter. It does not allow different converters to encode different - values. A single converter must be able to convert them all at once. + See base class. Always returns the same number of payloads as values. 
""" - for converter in self.converters: - payloads = await converter.encode_multiple(values) - if payloads is not None: - return payloads - return None + payloads = [] + for index, value in enumerate(values): + # We intentionally attempt these serially just in case a stateful + # converter may rely on the previous values + payload = None + for converter in self.converters: + payload = await converter.encode(value) + if payload is not None: + break + if payload is None: + raise RuntimeError( + f"Value at index {index} of type {type(value)} could not be converted" + ) + payloads.append(payload) + return payloads - async def decode_multiple( - self, payloads: temporalio.api.common.v1.Payloads - ) -> Tuple[list[Any], bool]: - """Decode multiple payloads trying each converter. See base class. + async def decode( + self, + payloads: list[temporalio.api.common.v1.Payload], + type_hints: Optional[list[Type]], + ) -> list[Any]: + """Decode values trying each converter. - Note, this attempts full decode_multiple calls on the delegated - converter. It does not allow different converters to decode different - payloads. A single converter must be able to convert them all at once. + See base class. Always returns the same number of values as payloads. 
""" - for converter in self.converters: - values, ok = await converter.decode_multiple(payloads) - if ok: - return (values, True) - return ([], False) + values = [] + for index, payload in enumerate(payloads): + type_hint = None + if type_hints is not None: + type_hint = type_hints[index] + # We intentionally attempt these serially just in case a stateful + # converter may rely on the previous values + ok = False + for converter in self.converters: + value, ok = await converter.decode(payload, type_hint) + if ok: + break + if not ok: + encoding = payload.metadata.get("encoding", b"").decode() + raise RuntimeError( + f"Payload at index {index} with encoding '{encoding}' could not be converted" + ) + values.append(value) + return values class BinaryNullPayloadConverter(PayloadConverter): - """Converter for binary/null payloads supporting None values.""" + """Converter for 'binary/null' payloads supporting None values.""" async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" @@ -176,13 +178,15 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] return None async def decode( - self, payload: temporalio.api.common.v1.Payload + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, ) -> Tuple[Any, bool]: return (None, payload.metadata["encoding"] == b"binary/null") class BinaryPlainPayloadConverter(PayloadConverter): - """Converter for binary/plain payloads supporting bytes values.""" + """Converter for 'binary/plain' payloads supporting bytes values.""" async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" @@ -193,7 +197,9 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] return None async def decode( - self, payload: temporalio.api.common.v1.Payload + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, ) -> Tuple[Any, bool]: """See base 
class.""" return (payload.data, payload.metadata["encoding"] == b"binary/plain") @@ -203,7 +209,7 @@ async def decode( class JSONProtoPayloadConverter(PayloadConverter): - """Converter for json/protobuf payloads supporting protobuf Message values.""" + """Converter for 'json/protobuf' payloads supporting protobuf Message values.""" async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" @@ -225,7 +231,9 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] return None async def decode( - self, payload: temporalio.api.common.v1.Payload + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, ) -> Tuple[Any, bool]: """See base class.""" if payload.metadata["encoding"] == b"json/protobuf": @@ -237,7 +245,7 @@ async def decode( class BinaryProtoPayloadConverter(PayloadConverter): - """Converter for binary/protobuf payloads supporting protobuf Message values.""" + """Converter for 'binary/protobuf' payloads supporting protobuf Message values.""" async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" @@ -252,7 +260,9 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] return None async def decode( - self, payload: temporalio.api.common.v1.Payload + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, ) -> Tuple[Any, bool]: """See base class.""" if payload.metadata["encoding"] == b"binary/protobuf": @@ -264,17 +274,16 @@ async def decode( class JSONPlainPayloadConverter(PayloadConverter): - """Converter for json/plain payloads supporting common Python values. + """Converter for 'json/plain' payloads supporting common Python values. This supports all values that :py:func:`json.dump` supports and also adds encoding support for :py:mod:`dataclasses` by converting them using - :py:func:`dataclasses.asdict`. 
Note that on decode they come back as dict as - well and the caller must convert back to a data class. + :py:func:`dataclasses.asdict`. Note that on decode, if there is a type hint, + it will be used to construct the data class. """ _encoder: Optional[Type[json.JSONEncoder]] _decoder: Optional[Type[json.JSONDecoder]] - _dataclass_asdict: bool _encoding: bytes def __init__( @@ -282,26 +291,23 @@ def __init__( *, encoder: Optional[Type[json.JSONEncoder]] = None, decoder: Optional[Type[json.JSONDecoder]] = None, - dataclass_asdict: bool = True, - encoding: str = "json/plain" + encoding: str = "json/plain", ) -> None: """Initialize a JSON data converter. Args: encoder: Custom encoder class object to use. decoder: Custom decoder class object to use. - dataclass_asdict: Whether to support data class encoding. encoding: Encoding name to use. """ super().__init__() self._encoder = encoder self._decoder = decoder - self._dataclass_asdict = dataclass_asdict self._encoding = encoding.encode() async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" - if self._dataclass_asdict and dataclasses.is_dataclass(value): + if dataclasses.is_dataclass(value): value = dataclasses.asdict(value) # We swallow JSON encode error and just return None try: @@ -315,28 +321,39 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] return None async def decode( - self, payload: temporalio.api.common.v1.Payload + self, + payload: temporalio.api.common.v1.Payload, + type_hint: Optional[Type] = None, ) -> Tuple[Any, bool]: """See base class.""" if payload.metadata["encoding"] == self._encoding: # We do not swallow JSON decode errors since we expect success due # to already-matched encoding - return (json.loads(payload.data, cls=self._decoder), True) + obj = json.loads(payload.data, cls=self._decoder) + # If the object is a dict and the type hint is present for a data + # class, we instantiate the data class with the value + 
if ( + isinstance(obj, dict) + and inspect.isclass(type_hint) + and dataclasses.is_dataclass(type_hint) + ): + obj = type_hint(**obj) + return (obj, True) return (None, False) # TODO(cretz): Should this be a var that can be changed instead? If so, can it # be replaced _after_ client creation? We'd just have to fallback to this # default at conversion time instead of instantiation time. -def default() -> CompositePayloadConverter: +def default() -> CompositeDataConverter: """Default converter compatible with other Temporal SDKs. This handles None, bytes, all protobuf message types, and any type that :py:func:`json.dump` accepts. In addition, this supports encoding - :py:mod:`dataclasses` but not decoding them, so decoded data classes appear - as dicts which may need to be converted to data classes by users. + :py:mod:`dataclasses` and also decoding them provided the data class is in + the type hint. """ - return CompositePayloadConverter( + return CompositeDataConverter( BinaryNullPayloadConverter(), BinaryPlainPayloadConverter(), JSONProtoPayloadConverter(), diff --git a/tests/converter_test.py b/tests/converter_test.py index 28f2500f9..7296944b7 100644 --- a/tests/converter_test.py +++ b/tests/converter_test.py @@ -1,33 +1,40 @@ from dataclasses import dataclass +import pytest + import temporalio.api.common.v1 import temporalio.converter async def test_default(): async def assert_payload( - input, expected_encoding, expected_data, *, expected_decoded_input=None + input, + expected_encoding, + expected_data, + *, + expected_decoded_input=None, + type_hint=None ): - payload = await temporalio.converter.default().encode(input) - # Only check none if that's the expected encoding - if expected_encoding is None: - assert payload is None - return + payloads = await temporalio.converter.default().encode([input]) # Check encoding and data + assert len(payloads) == 1 if isinstance(expected_encoding, str): expected_encoding = expected_encoding.encode() - assert 
payload.metadata["encoding"] == expected_encoding + assert payloads[0].metadata["encoding"] == expected_encoding if isinstance(expected_data, str): expected_data = expected_data.encode() - assert payload.data == expected_data + assert payloads[0].data == expected_data # Decode and check - actual_input, ok = await temporalio.converter.default().decode(payload) - assert ok + actual_inputs = await temporalio.converter.default().decode( + payloads, [type_hint] + ) + assert len(actual_inputs) == 1 if expected_decoded_input is None: expected_decoded_input = input - assert actual_input == expected_decoded_input - return payload + assert actual_inputs[0] == expected_decoded_input + return payloads[0] + # Basic types await assert_payload(None, "binary/null", "") await assert_payload(b"some binary", "binary/plain", "some binary") payload = await assert_payload( @@ -47,16 +54,21 @@ async def assert_payload( await assert_payload(True, "json/plain", "true") await assert_payload(False, "json/plain", "false") - class NonSerializableClass: - pass + # Unknown type + with pytest.raises(RuntimeError) as excinfo: + + class NonSerializableClass: + pass - await assert_payload(NonSerializableClass(), None, None) + await assert_payload(NonSerializableClass(), None, None) + assert "could not be converted" in str(excinfo.value) @dataclass class MyDataClass: foo: str bar: int + # Data class without type hint is just dict await assert_payload( MyDataClass(foo="somestr", bar=123), "json/plain", @@ -64,10 +76,18 @@ class MyDataClass: expected_decoded_input={"foo": "somestr", "bar": 123}, ) + # Data class with type hint reconstructs the class + await assert_payload( + MyDataClass(foo="somestr", bar=123), + "json/plain", + '{"bar":123,"foo":"somestr"}', + type_hint=MyDataClass, + ) + async def test_binary_proto(): # We have to test this separately because by default it never encodes - # anything + # anything since JSON proto takes precedence conv = 
temporalio.converter.BinaryProtoPayloadConverter() proto = temporalio.api.common.v1.WorkflowExecution(workflow_id="id1", run_id="id2") payload = await conv.encode(proto) @@ -79,19 +99,3 @@ async def test_binary_proto(): decoded, ok = await conv.decode(payload) assert ok assert decoded == proto - - -async def test_multiple(): - payloads = await temporalio.converter.default().encode_multiple( - [{"foo": "bar"}, {"baz": "qux"}] - ) - assert len(payloads.payloads) == 2 - assert payloads.payloads[0].metadata["encoding"] == b"json/plain" - assert payloads.payloads[0].data == b'{"foo":"bar"}' - assert payloads.payloads[1].metadata["encoding"] == b"json/plain" - assert payloads.payloads[1].data == b'{"baz":"qux"}' - values, ok = await temporalio.converter.default().decode_multiple(payloads) - assert ok - assert len(values) == 2 - assert values[0] == {"foo": "bar"} - assert values[1] == {"baz": "qux"} From 4192a77cdc21cc94ed6869e06ce50bebf3b873d9 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Tue, 1 Feb 2022 10:00:10 -0600 Subject: [PATCH 09/15] Remove client and common packages, rework some converter approach, update python version in CI --- .github/workflows/ci.yml | 2 +- scripts/gen-protos.py | 5 +- temporalio/__init__.py | 5 - temporalio/client/__init__.py | 4 - temporalio/client/client.py | 131 ------------------ temporalio/client/workflow_execution.py | 2 - temporalio/client/workflow_handle.py | 39 ------ temporalio/client/workflow_service.py | 14 -- temporalio/common/__init__.py | 1 - temporalio/common/retry_policy.py | 13 -- temporalio/converter.py | 176 +++++++++++++++--------- tests/converter_test.py | 5 +- 12 files changed, 117 insertions(+), 280 deletions(-) delete mode 100644 temporalio/client/__init__.py delete mode 100644 temporalio/client/client.py delete mode 100644 temporalio/client/workflow_execution.py delete mode 100644 temporalio/client/workflow_handle.py delete mode 100644 temporalio/client/workflow_service.py delete mode 100644 
temporalio/common/__init__.py delete mode 100644 temporalio/common/retry_policy.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76b217213..c77ce0c50 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -12,7 +12,7 @@ jobs: strategy: fail-fast: true matrix: - python: ["3.9"] + python: ["3.7", "3.10"] os: [ubuntu-latest] # TODO: macos-latest, windows-latest runs-on: ${{ matrix.os }} steps: diff --git a/scripts/gen-protos.py b/scripts/gen-protos.py index 4a6129961..eb6a3ea15 100644 --- a/scripts/gen-protos.py +++ b/scripts/gen-protos.py @@ -1,6 +1,5 @@ #!/usr/bin/env python3 import collections -import os import re import shutil import subprocess @@ -8,7 +7,7 @@ import tempfile from functools import partial from pathlib import Path -from typing import Mapping +from typing import List, Mapping base_dir = Path(__file__).parent.parent proto_dir = base_dir / "temporalio" / "bridge" / "sdk-core" / "protos" @@ -43,7 +42,7 @@ def fix_generated_output(base_path: Path): (https://github.com/protocolbuffers/protobuf/issues/1491) """ - imports: Mapping[str, list[str]] = collections.defaultdict(list) + imports: Mapping[str, List[str]] = collections.defaultdict(list) for p in base_path.iterdir(): if p.is_dir(): fix_generated_output(p) diff --git a/temporalio/__init__.py b/temporalio/__init__.py index f957d5aea..e69de29bb 100644 --- a/temporalio/__init__.py +++ b/temporalio/__init__.py @@ -1,5 +0,0 @@ -import typing - -from .common import RetryPolicy - -Convertible = typing.Any diff --git a/temporalio/client/__init__.py b/temporalio/client/__init__.py deleted file mode 100644 index 67e03f8c2..000000000 --- a/temporalio/client/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .client import Client, TLSConfig, WorkflowIDReusePolicy -from .workflow_execution import WorkflowExecution -from .workflow_handle import WorkflowHandle -from .workflow_service import WorkflowService diff --git a/temporalio/client/client.py b/temporalio/client/client.py 
deleted file mode 100644 index 3575c39cb..000000000 --- a/temporalio/client/client.py +++ /dev/null @@ -1,131 +0,0 @@ -from dataclasses import dataclass -from datetime import timedelta -from enum import Enum -from typing import Any, Awaitable, Mapping, Optional, Union - -import temporalio -import temporalio.client -import temporalio.converter - - -@dataclass -class TLSConfig: - server_root_ca_cert: Optional[Union[str, bytes]] = None - domain: Optional[str] = None - client_cert: Optional[Union[str, bytes]] = None - client_private_key: Optional[Union[str, bytes]] = None - - -class WorkflowIDReusePolicy(Enum): - ALLOW_DUPLICATE = 1 - ALLOW_DUPLICATE_FAILED_ONLY = 2 - REJECT_DUPLICATE = 3 - - -class Client: - @staticmethod - async def connect( - addr: str, - *, - namespace: str = "default", - data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), - headers: Mapping[str, str] = {}, - identity: Optional[str] = None, - tls_config: Optional[TLSConfig] = None, - retry_policy: Optional[temporalio.RetryPolicy] = None - ) -> "Client": - client = Client( - addr, - namespace=namespace, - data_converter=data_converter, - headers=headers, - identity=identity, - tls_config=tls_config, - retry_policy=retry_policy, - ) - await client.ready() - return client - - service: Awaitable[temporalio.client.WorkflowService] - - def __init__( - self, - addr: str, - *, - namespace: str = "default", - data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), - headers: Mapping[str, str] = {}, - identity: Optional[str] = None, - tls_config: Optional[TLSConfig] = None, - retry_policy: Optional[temporalio.RetryPolicy] = None - ) -> None: - raise NotImplementedError - - async def __aenter__(self) -> "Client": - await self.ready() - return self - - async def __aexit__(self) -> None: - await self.close() - - async def ready(self) -> None: - raise NotImplementedError - - async def close(self) -> None: - raise NotImplementedError - - async def 
start_workflow( - self, - workflow: str, - *args: Any, - task_queue: str, - id: Optional[str] = None, - execution_timeout: Optional[timedelta] = None, - run_timeout: Optional[timedelta] = None, - task_timeout: Optional[timedelta] = None, - id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, - retry_policy: Optional[temporalio.RetryPolicy] = None, - cron_schedule: Optional[str] = None, - memo: Mapping[str, temporalio.Convertible] = {}, - search_attributes: Mapping[str, temporalio.Convertible] = {}, - header: Mapping[str, temporalio.Convertible] = {} - ) -> temporalio.client.WorkflowHandle[Optional[temporalio.Convertible]]: - raise NotImplementedError - - async def execute_workflow( - self, - workflow: str, - *args: Any, - task_queue: str, - id: Optional[str] = None, - execution_timeout: Optional[timedelta] = None, - run_timeout: Optional[timedelta] = None, - task_timeout: Optional[timedelta] = None, - id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, - retry_policy: Optional[temporalio.RetryPolicy] = None, - cron_schedule: Optional[str] = None, - memo: Mapping[str, temporalio.Convertible] = {}, - search_attributes: Mapping[str, temporalio.Convertible] = {}, - header: Mapping[str, temporalio.Convertible] = {} - ) -> Optional[temporalio.Convertible]: - handle = await self.start_workflow( - workflow, - *args, - task_queue=task_queue, - id=id, - execution_timeout=execution_timeout, - run_timeout=run_timeout, - task_timeout=task_timeout, - id_reuse_policy=id_reuse_policy, - retry_policy=retry_policy, - cron_schedule=cron_schedule, - memo=memo, - search_attributes=search_attributes, - header=header - ) - return await handle.result() - - def get_workflow_handle( - self, id: str, run_id: Optional[str] = None - ) -> temporalio.client.WorkflowHandle[Optional[temporalio.Convertible]]: - return temporalio.client.WorkflowHandle(self, id, run_id) diff --git a/temporalio/client/workflow_execution.py 
b/temporalio/client/workflow_execution.py deleted file mode 100644 index b70753aae..000000000 --- a/temporalio/client/workflow_execution.py +++ /dev/null @@ -1,2 +0,0 @@ -class WorkflowExecution: - pass diff --git a/temporalio/client/workflow_handle.py b/temporalio/client/workflow_handle.py deleted file mode 100644 index a377fbdd4..000000000 --- a/temporalio/client/workflow_handle.py +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Generic, Optional, TypeVar - -import temporalio -import temporalio.client - -T = TypeVar("T") - - -class WorkflowHandle(Generic[T]): - _client: temporalio.client.Client - id: str - run_id: Optional[str] = None - - def __init__( - self, client: temporalio.client.Client, id: str, run_id: Optional[str] = None - ) -> None: - self._client = client - self.id = id - self.run_id = run_id - - async def result(self) -> T: - raise NotImplementedError - - async def cancel(self) -> None: - raise NotImplementedError - - async def describe(self) -> temporalio.client.WorkflowExecution: - raise NotImplementedError - - async def query( - self, name: str, *args: temporalio.Convertible - ) -> temporalio.Convertible: - raise NotImplementedError - - async def signal(self, name: str, *args: temporalio.Convertible) -> None: - raise NotImplementedError - - async def terminate(self, *, reason: Optional[str] = None) -> None: - raise NotImplementedError diff --git a/temporalio/client/workflow_service.py b/temporalio/client/workflow_service.py deleted file mode 100644 index b62e87158..000000000 --- a/temporalio/client/workflow_service.py +++ /dev/null @@ -1,14 +0,0 @@ -from temporalio.api.workflowservice.v1 import ( - StartWorkflowExecutionRequest, - StartWorkflowExecutionResponse, -) - - -class WorkflowService: - def __init__(self): - raise NotImplementedError - - async def start_workflow_execution( - self, request: StartWorkflowExecutionRequest - ) -> StartWorkflowExecutionResponse: - raise NotImplementedError diff --git a/temporalio/common/__init__.py 
b/temporalio/common/__init__.py deleted file mode 100644 index 34150a0e5..000000000 --- a/temporalio/common/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .retry_policy import RetryPolicy diff --git a/temporalio/common/retry_policy.py b/temporalio/common/retry_policy.py deleted file mode 100644 index c22659cdb..000000000 --- a/temporalio/common/retry_policy.py +++ /dev/null @@ -1,13 +0,0 @@ -from dataclasses import dataclass -from datetime import timedelta -from typing import Optional - - -@dataclass -class RetryPolicy: - initial_interval: timedelta - randomization_factor: float - multiplier: float - max_interval: timedelta - max_elapsed_time: Optional[timedelta] - max_retries: int diff --git a/temporalio/converter.py b/temporalio/converter.py index 151cd8770..1f1b62ded 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -4,7 +4,7 @@ import inspect import json from abc import ABC, abstractmethod -from typing import Any, Optional, Tuple, Type +from typing import Any, List, Mapping, Optional, Type import google.protobuf.json_format import google.protobuf.message @@ -17,7 +17,7 @@ class DataConverter(ABC): """Base converter to/from multiple payloads/values.""" @abstractmethod - async def encode(self, values: list[Any]) -> list[temporalio.api.common.v1.Payload]: + async def encode(self, values: List[Any]) -> List[temporalio.api.common.v1.Payload]: """Encode values into payloads. Args: @@ -35,9 +35,9 @@ async def encode(self, values: list[Any]) -> list[temporalio.api.common.v1.Paylo @abstractmethod async def decode( self, - payloads: list[temporalio.api.common.v1.Payload], - type_hints: Optional[list[Type]], - ) -> list[Any]: + payloads: List[temporalio.api.common.v1.Payload], + type_hints: Optional[List[Type]], + ) -> List[Any]: """Decode payloads into values. 
Args: @@ -60,6 +60,12 @@ async def decode( class PayloadConverter(ABC): """Base converter to/from single payload/value.""" + @property + @abstractmethod + def encoding(self) -> str: + """Encoding for the payload this converter works with.""" + raise NotImplementedError + @abstractmethod async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """Encode a single value to a payload or None. @@ -69,16 +75,21 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] Returns: Payload of the value or None if unable to convert. + + Raises: + TypeError: Value is not the expected type. + ValueError: Value is of the expected type but otherwise incorrect. + RuntimeError: General error during encoding. """ - return None + raise NotImplementedError @abstractmethod async def decode( self, payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, - ) -> Tuple[Any, bool]: - """Decode a single payload to a Python value if able. + ) -> Any: + """Decode a single payload to a Python value or raise exception. Args: payload: Payload to convert to Python value. @@ -86,13 +97,14 @@ async def decode( if there are no annotations on the target. Return: - A tuple with the first value as the Python value and the second as - whether it could be converted or not. If the payload can be - converted, it is the first value of the tuple and the second value - is True. If the payload cannot be converted, the first value is - undefined and the second value is False. + The decoded value from the payload. Since the encoding is checked by + the caller, this should raise an exception if the payload cannot be + converted. + + Raises: + RuntimeError: General error during decoding. """ - return (None, False) + raise NotImplementedError class CompositeDataConverter(DataConverter): @@ -105,7 +117,7 @@ class CompositeDataConverter(DataConverter): converters: List of payload converters to delegate to, in order. 
""" - converters: list[PayloadConverter] + converters: Mapping[bytes, PayloadConverter] def __init__(self, *converters: PayloadConverter) -> None: """Initializes the data converter. @@ -113,67 +125,77 @@ def __init__(self, *converters: PayloadConverter) -> None: Args: converters: Payload converters to delegate to, in order. """ - self.converters = list(converters) + # Insertion order preserved here + self.converters = {c.encoding.encode(): c for c in converters} - async def encode(self, values: list[Any]) -> list[temporalio.api.common.v1.Payload]: + async def encode(self, values: List[Any]) -> List[temporalio.api.common.v1.Payload]: """Encode values trying each converter. See base class. Always returns the same number of payloads as values. + + Raises: + RuntimeError: No known converter """ payloads = [] for index, value in enumerate(values): # We intentionally attempt these serially just in case a stateful # converter may rely on the previous values payload = None - for converter in self.converters: + for converter in self.converters.values(): payload = await converter.encode(value) if payload is not None: break if payload is None: raise RuntimeError( - f"Value at index {index} of type {type(value)} could not be converted" + f"Value at index {index} of type {type(value)} has no known converter" ) payloads.append(payload) return payloads async def decode( self, - payloads: list[temporalio.api.common.v1.Payload], - type_hints: Optional[list[Type]], - ) -> list[Any]: + payloads: List[temporalio.api.common.v1.Payload], + type_hints: Optional[List[Type]], + ) -> List[Any]: """Decode values trying each converter. See base class. Always returns the same number of values as payloads. 
+ + Raises: + KeyError: Unknown payload encoding + RuntimeError: Error during decode """ values = [] for index, payload in enumerate(payloads): + encoding = payload.metadata.get("encoding", b"") + converter = self.converters.get(encoding) + if converter is None: + raise KeyError(f"Unknown payload encoding {encoding.decode()}") type_hint = None if type_hints is not None: type_hint = type_hints[index] - # We intentionally attempt these serially just in case a stateful - # converter may rely on the previous values - ok = False - for converter in self.converters: - value, ok = await converter.decode(payload, type_hint) - if ok: - break - if not ok: - encoding = payload.metadata.get("encoding", b"").decode() + try: + values.append(await converter.decode(payload, type_hint)) + except RuntimeError as err: raise RuntimeError( - f"Payload at index {index} with encoding '{encoding}' could not be converted" - ) - values.append(value) + f"Payload at index {index} with encoding {encoding.decode()} could not be converted" + ) from err return values class BinaryNullPayloadConverter(PayloadConverter): """Converter for 'binary/null' payloads supporting None values.""" + @property + def encoding(self) -> str: + """See base class.""" + return "binary/null" + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" if value is None: return temporalio.api.common.v1.Payload( - metadata={"encoding": b"binary/null"} + metadata={"encoding": self.encoding.encode()} ) return None @@ -181,18 +203,25 @@ async def decode( self, payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, - ) -> Tuple[Any, bool]: - return (None, payload.metadata["encoding"] == b"binary/null") + ) -> Any: + if len(payload.data) > 0: + raise RuntimeError("Expected empty data set for binary/null") + return None class BinaryPlainPayloadConverter(PayloadConverter): """Converter for 'binary/plain' payloads supporting bytes values.""" + @property + def 
encoding(self) -> str: + """See base class.""" + return "binary/plain" + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" if isinstance(value, bytes): return temporalio.api.common.v1.Payload( - metadata={"encoding": b"binary/plain"}, data=value + metadata={"encoding": self.encoding.encode()}, data=value ) return None @@ -200,9 +229,9 @@ async def decode( self, payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, - ) -> Tuple[Any, bool]: + ) -> Any: """See base class.""" - return (payload.data, payload.metadata["encoding"] == b"binary/plain") + return payload.data _sym_db = google.protobuf.symbol_database.Default() @@ -211,6 +240,11 @@ async def decode( class JSONProtoPayloadConverter(PayloadConverter): """Converter for 'json/protobuf' payloads supporting protobuf Message values.""" + @property + def encoding(self) -> str: + """See base class.""" + return "json/protobuf" + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" if isinstance(value, google.protobuf.message.Message): @@ -223,7 +257,7 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] ) return temporalio.api.common.v1.Payload( metadata={ - "encoding": b"json/protobuf", + "encoding": self.encoding.encode(), "messageType": value.DESCRIPTOR.full_name.encode(), }, data=json_str.encode(), @@ -234,25 +268,32 @@ async def decode( self, payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, - ) -> Tuple[Any, bool]: + ) -> Any: """See base class.""" - if payload.metadata["encoding"] == b"json/protobuf": - # This raises error if not found - value = _sym_db.GetSymbol(payload.metadata["messageType"].decode())() - google.protobuf.json_format.Parse(payload.data, value) - return (value, True) - return (None, False) + message_type = payload.metadata.get("messageType", b"").decode() + try: + value = _sym_db.GetSymbol(message_type)() + 
return google.protobuf.json_format.Parse(payload.data, value) + except KeyError as err: + raise RuntimeError(f"Unknown Protobuf type {message_type}") from err + except google.protobuf.json_format.ParseError as err: + raise RuntimeError("Failed parsing") from err class BinaryProtoPayloadConverter(PayloadConverter): """Converter for 'binary/protobuf' payloads supporting protobuf Message values.""" + @property + def encoding(self) -> str: + """See base class.""" + return "binary/protobuf" + async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" if isinstance(value, google.protobuf.message.Message): return temporalio.api.common.v1.Payload( metadata={ - "encoding": b"binary/protobuf", + "encoding": self.encoding.encode(), "messageType": value.DESCRIPTOR.full_name.encode(), }, data=value.SerializeToString(), @@ -263,14 +304,17 @@ async def decode( self, payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, - ) -> Tuple[Any, bool]: + ) -> Any: """See base class.""" - if payload.metadata["encoding"] == b"binary/protobuf": - # This raises error if not found - value = _sym_db.GetSymbol(payload.metadata["messageType"].decode())() + message_type = payload.metadata.get("messageType", b"").decode() + try: + value = _sym_db.GetSymbol(message_type)() value.ParseFromString(payload.data) - return (value, True) - return (None, False) + return value + except KeyError as err: + raise RuntimeError(f"Unknown Protobuf type {message_type}") from err + except google.protobuf.message.DecodeError as err: + raise RuntimeError("Failed parsing") from err class JSONPlainPayloadConverter(PayloadConverter): @@ -284,7 +328,7 @@ class JSONPlainPayloadConverter(PayloadConverter): _encoder: Optional[Type[json.JSONEncoder]] _decoder: Optional[Type[json.JSONDecoder]] - _encoding: bytes + _encoding: str def __init__( self, @@ -303,7 +347,12 @@ def __init__( super().__init__() self._encoder = encoder self._decoder = decoder - 
self._encoding = encoding.encode() + self._encoding = encoding + + @property + def encoding(self) -> str: + """See base class.""" + return self._encoding async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" @@ -312,7 +361,7 @@ async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload] # We swallow JSON encode error and just return None try: return temporalio.api.common.v1.Payload( - metadata={"encoding": self._encoding}, + metadata={"encoding": self._encoding.encode()}, data=json.dumps( value, cls=self._encoder, separators=(",", ":"), sort_keys=True ).encode(), @@ -324,11 +373,9 @@ async def decode( self, payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, - ) -> Tuple[Any, bool]: + ) -> Any: """See base class.""" - if payload.metadata["encoding"] == self._encoding: - # We do not swallow JSON decode errors since we expect success due - # to already-matched encoding + try: obj = json.loads(payload.data, cls=self._decoder) # If the object is a dict and the type hint is present for a data # class, we instantiate the data class with the value @@ -338,8 +385,9 @@ async def decode( and dataclasses.is_dataclass(type_hint) ): obj = type_hint(**obj) - return (obj, True) - return (None, False) + return obj + except json.JSONDecodeError as err: + raise RuntimeError("Failed parsing") from err # TODO(cretz): Should this be a var that can be changed instead? 
If so, can it diff --git a/tests/converter_test.py b/tests/converter_test.py index 7296944b7..ee7219061 100644 --- a/tests/converter_test.py +++ b/tests/converter_test.py @@ -61,7 +61,7 @@ class NonSerializableClass: pass await assert_payload(NonSerializableClass(), None, None) - assert "could not be converted" in str(excinfo.value) + assert "has no known converter" in str(excinfo.value) @dataclass class MyDataClass: @@ -96,6 +96,5 @@ async def test_binary_proto(): payload.metadata["messageType"] == b"temporal.api.common.v1.WorkflowExecution" ) assert payload.data == proto.SerializeToString() - decoded, ok = await conv.decode(payload) - assert ok + decoded = await conv.decode(payload) assert decoded == proto From 9c16fb7a9c5fe58a646f539689cb13b56ffb994a Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Tue, 1 Feb 2022 15:02:45 -0600 Subject: [PATCH 10/15] Switch to https://github.com/Sushisource/sdk-core branch of core-to-worker-impl in submodule --- .gitmodules | 2 +- temporalio/bridge/sdk-core | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index 80965358f..d1c4f4d05 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "sdk-core"] path = temporalio/bridge/sdk-core - url = git@github.com:temporalio/sdk-core.git + url = git@github.com:Sushisource/sdk-core.git diff --git a/temporalio/bridge/sdk-core b/temporalio/bridge/sdk-core index f58f6305b..0fa511302 160000 --- a/temporalio/bridge/sdk-core +++ b/temporalio/bridge/sdk-core @@ -1 +1 @@ -Subproject commit f58f6305b8e4b4cc21bcdd9637d669e9930d718f +Subproject commit 0fa511302a814d9e70ccd3b1ce0c17a72a3cfb53 From 4e6900f9d2ac778cf1e84f1a73df27a7e0a75aeb Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Wed, 2 Feb 2022 13:33:14 -0600 Subject: [PATCH 11/15] Work on core client implementation --- .gitignore | 1 + poetry.lock | 98 +- pyproject.toml | 4 +- scripts/build-bridge.py | 12 + temporalio/bridge/Cargo.lock | 2330 +++++++++++++++++++++++ 
temporalio/bridge/Cargo.toml | 21 + temporalio/bridge/client.py | 80 + temporalio/bridge/sdk-bridge/Cargo.toml | 18 - temporalio/bridge/src/lib.rs | 210 ++ temporalio/converter.py | 10 +- tests/bridge/__init__.py | 0 tests/bridge/test_client.py | 27 + 12 files changed, 2756 insertions(+), 55 deletions(-) create mode 100644 scripts/build-bridge.py create mode 100644 temporalio/bridge/Cargo.lock create mode 100644 temporalio/bridge/Cargo.toml create mode 100644 temporalio/bridge/client.py delete mode 100644 temporalio/bridge/sdk-bridge/Cargo.toml create mode 100644 temporalio/bridge/src/lib.rs create mode 100644 tests/bridge/__init__.py create mode 100644 tests/bridge/test_client.py diff --git a/.gitignore b/.gitignore index 69e1e7aee..88e10b9ae 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,5 @@ temporalio/api/* !temporalio/api/__init__.py temporalio/bridge/proto/* !temporalio/bridge/proto/__init__.py +temporalio/bridge/target/ __pycache__ diff --git a/poetry.lock b/poetry.lock index 3f9179203..b88635d32 100644 --- a/poetry.lock +++ b/poetry.lock @@ -132,6 +132,17 @@ requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] plugins = ["setuptools"] +[[package]] +name = "maturin" +version = "0.12.6" +description = "Build and publish crates with pyo3, rust-cpython and cffi bindings as well as rust binaries as python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +toml = ">=0.10.2,<0.11.0" + [[package]] name = "mypy" version = "0.931" @@ -218,7 +229,7 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "protobuf" -version = "3.19.3" +version = "3.19.4" description = "Protocol Buffers" category = "dev" optional = false @@ -314,7 +325,7 @@ python-versions = ">=3.6" [[package]] name = "types-futures" -version = "3.3.7" +version = "3.3.8" description = "Typing stubs for futures" category = "main" optional = false @@ -322,7 +333,7 @@ python-versions = "*" [[package]] 
name = "types-protobuf" -version = "3.19.6" +version = "3.19.8" description = "Typing stubs for protobuf" category = "main" optional = false @@ -354,7 +365,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "2c3b7797e9c425dff9cb79f175ff7d4f1a1e2097f9c7a6e6728cd26f4e6bf4e0" +content-hash = "6f01485d25e3ac67af3d750d5995d2d38ad074270853130194bf98d1c97f5cbd" [metadata.files] atomicwrites = [ @@ -481,6 +492,25 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] +maturin = [ + {file = "maturin-0.12.6-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7c36e8ee53fb6f544d8f1b1b480035bf204806494be5aa44394a278b5cf9b522"}, + {file = "maturin-0.12.6-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:90d6ed47fa51d902e0afba0b469652f76e1b0e799af2910712c5dec3d1343003"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8f703e15fefc1424d3d4e65db62364bf930cbc46cf0fdaf44b788f6d895dc8b7"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d502c69f55e21de00645005aa1e4562979c07e26264e5ca00c0369fac1419b96"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:55b9e04eaa6809ed2b6b62bf56e0cfc9cc700b0824ffa565ff3b6cbcff519a46"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da86507d756c478b0847ae673f49073643d48633d0297cfdf8a1af4205c8cdce"}, + {file = "maturin-0.12.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff18b33ca7453c38868d3a8a74540052f12b5a67de87c8ed409c19ba6aad48f2"}, + {file = 
"maturin-0.12.6-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a67e20421d5f9f82f9c22b2fe5aa787b21f2dbbb8456d939b57d6999e02c4ccf"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:52a278cae8905b52be760a01186647c94bb87bd7e5788ff1fa6e8fd53e075f7b"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:ef5fe408f5f1966d80bc78a65288501d721aed5a33a6bfe41c801504f266197d"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_i686.whl", hash = "sha256:f0f76f607652be769f2c3f3c41eb82b450901b6e17208c1086fd2e896056f5de"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:9829673dc54b306dd92ff3e5230a9e6e9704573ff2a4d1289728c277686113be"}, + {file = "maturin-0.12.6-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:1d77c2ed1ec225a5d013195605f32d8206230da441e1c95eb2627692acf68b44"}, + {file = "maturin-0.12.6-py3-none-win32.whl", hash = "sha256:1ecafea37426a49e2f5369b350c62a76a87ed6fb97d627e08d7349df99099dc8"}, + {file = "maturin-0.12.6-py3-none-win_amd64.whl", hash = "sha256:9c5476aae0d60215039d76d64175791c20741e7b6ab0174b6bb356289029890e"}, + {file = "maturin-0.12.6-py3-none-win_arm64.whl", hash = "sha256:3c28d685e0449f0110b1f5810643eca348a382adffef9cdd1a76cb181e3ddede"}, + {file = "maturin-0.12.6.tar.gz", hash = "sha256:2b14cfae808b45a130e19b2999acea423d2e10e7a29ae2336996ba72ba442ff6"}, +] mypy = [ {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, @@ -528,32 +558,32 @@ pluggy = [ {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] protobuf = [ - {file = "protobuf-3.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1cb2ed66aac593adbf6dca4f07cd7ee7e2958b17bbc85b2cc8bc564ebeb258ec"}, - 
{file = "protobuf-3.19.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:898bda9cd37ec0c781b598891e86435de80c3bfa53eb483a9dac5a11ec93e942"}, - {file = "protobuf-3.19.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad761ef3be34c8bdc7285bec4b40372a8dad9e70cfbdc1793cd3cf4c1a4ce74"}, - {file = "protobuf-3.19.3-cp310-cp310-win32.whl", hash = "sha256:2cddcbcc222f3144765ccccdb35d3621dc1544da57a9aca7e1944c1a4fe3db11"}, - {file = "protobuf-3.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:6202df8ee8457cb00810c6e76ced480f22a1e4e02c899a14e7b6e6e1de09f938"}, - {file = "protobuf-3.19.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:397d82f1c58b76445469c8c06b8dee1ff67b3053639d054f52599a458fac9bc6"}, - {file = "protobuf-3.19.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e54b8650e849ee8e95e481024bff92cf98f5ec61c7650cb838d928a140adcb63"}, - {file = "protobuf-3.19.3-cp36-cp36m-win32.whl", hash = "sha256:3bf3a07d17ba3511fe5fa916afb7351f482ab5dbab5afe71a7a384274a2cd550"}, - {file = "protobuf-3.19.3-cp36-cp36m-win_amd64.whl", hash = "sha256:afa8122de8064fd577f49ae9eef433561c8ace97a0a7b969d56e8b1d39b5d177"}, - {file = "protobuf-3.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18c40a1b8721026a85187640f1786d52407dc9c1ba8ec38accb57a46e84015f6"}, - {file = "protobuf-3.19.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:af7238849fa79285d448a24db686517570099739527a03c9c2971cce99cc5ae2"}, - {file = "protobuf-3.19.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e765e6dfbbb02c55e4d6d1145743401a84fc0b508f5a81b2c5a738cf86353139"}, - {file = "protobuf-3.19.3-cp37-cp37m-win32.whl", hash = "sha256:c781402ed5396ab56358d7b866d78c03a77cbc26ba0598d8bb0ac32084b1a257"}, - {file = "protobuf-3.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:544fe9705189b249380fae07952d220c97f5c6c9372a6f936cc83a79601dcb70"}, - {file = "protobuf-3.19.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:84bf3aa3efb00dbe1c7ed55da0f20800b0662541e582d7e62b3e1464d61ed365"}, - {file = "protobuf-3.19.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3f80a3491eaca767cdd86cb8660dc778f634b44abdb0dffc9b2a8e8d0cd617d0"}, - {file = "protobuf-3.19.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9401d96552befcc7311f5ef8f0fa7dba0ef5fd805466b158b141606cd0ab6a8"}, - {file = "protobuf-3.19.3-cp38-cp38-win32.whl", hash = "sha256:ef02d112c025e83db5d1188a847e358beab3e4bbfbbaf10eaf69e67359af51b2"}, - {file = "protobuf-3.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:1291a0a7db7d792745c99d4657b4c5c4942695c8b1ac1bfb993a34035ec123f7"}, - {file = "protobuf-3.19.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49677e5e9c7ea1245a90c2e8a00d304598f22ea3aa0628f0e0a530a9e70665fa"}, - {file = "protobuf-3.19.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:df2ba379ee42427e8fcc6a0a76843bff6efb34ef5266b17f95043939b5e25b69"}, - {file = "protobuf-3.19.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2acd7ca329be544d1a603d5f13a4e34a3791c90d651ebaf130ba2e43ae5397c6"}, - {file = "protobuf-3.19.3-cp39-cp39-win32.whl", hash = "sha256:b53519b2ebec70cfe24b4ddda21e9843f0918d7c3627a785393fb35d402ab8ad"}, - {file = "protobuf-3.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:8ceaf5fdb72c8e1fcb7be9f2b3b07482ce058a3548180c0bdd5c7e4ac5e14165"}, - {file = "protobuf-3.19.3-py2.py3-none-any.whl", hash = "sha256:f6d4b5b7595a57e69eb7314c67bef4a3c745b4caf91accaf72913d8e0635111b"}, - {file = "protobuf-3.19.3.tar.gz", hash = "sha256:d975a6314fbf5c524d4981e24294739216b5fb81ef3c14b86fb4b045d6690907"}, + {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, + {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, + {file = 
"protobuf-3.19.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072fbc78d705d3edc7ccac58a62c4c8e0cec856987da7df8aca86e647be4e35c"}, + {file = "protobuf-3.19.4-cp310-cp310-win32.whl", hash = "sha256:7bb03bc2873a2842e5ebb4801f5c7ff1bfbdf426f85d0172f7644fcda0671ae0"}, + {file = "protobuf-3.19.4-cp310-cp310-win_amd64.whl", hash = "sha256:f358aa33e03b7a84e0d91270a4d4d8f5df6921abe99a377828839e8ed0c04e07"}, + {file = "protobuf-3.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1c91ef4110fdd2c590effb5dca8fdbdcb3bf563eece99287019c4204f53d81a4"}, + {file = "protobuf-3.19.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c438268eebb8cf039552897d78f402d734a404f1360592fef55297285f7f953f"}, + {file = "protobuf-3.19.4-cp36-cp36m-win32.whl", hash = "sha256:835a9c949dc193953c319603b2961c5c8f4327957fe23d914ca80d982665e8ee"}, + {file = "protobuf-3.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4276cdec4447bd5015453e41bdc0c0c1234eda08420b7c9a18b8d647add51e4b"}, + {file = "protobuf-3.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6cbc312be5e71869d9d5ea25147cdf652a6781cf4d906497ca7690b7b9b5df13"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54a1473077f3b616779ce31f477351a45b4fef8c9fd7892d6d87e287a38df368"}, + {file = "protobuf-3.19.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:435bb78b37fc386f9275a7035fe4fb1364484e38980d0dd91bc834a02c5ec909"}, + {file = "protobuf-3.19.4-cp37-cp37m-win32.whl", hash = "sha256:16f519de1313f1b7139ad70772e7db515b1420d208cb16c6d7858ea989fc64a9"}, + {file = "protobuf-3.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:cdc076c03381f5c1d9bb1abdcc5503d9ca8b53cf0a9d31a9f6754ec9e6c8af0f"}, + {file = "protobuf-3.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:69da7d39e39942bd52848438462674c463e23963a1fdaa84d88df7fbd7e749b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:48ed3877fa43e22bcacc852ca76d4775741f9709dd9575881a373bd3e85e54b2"}, + {file = "protobuf-3.19.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd95d1dfb9c4f4563e6093a9aa19d9c186bf98fa54da5252531cc0d3a07977e7"}, + {file = "protobuf-3.19.4-cp38-cp38-win32.whl", hash = "sha256:b38057450a0c566cbd04890a40edf916db890f2818e8682221611d78dc32ae26"}, + {file = "protobuf-3.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:7ca7da9c339ca8890d66958f5462beabd611eca6c958691a8fe6eccbd1eb0c6e"}, + {file = "protobuf-3.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:36cecbabbda242915529b8ff364f2263cd4de7c46bbe361418b5ed859677ba58"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c1068287025f8ea025103e37d62ffd63fec8e9e636246b89c341aeda8a67c934"}, + {file = "protobuf-3.19.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96bd766831596d6014ca88d86dc8fe0fb2e428c0b02432fd9db3943202bf8c5e"}, + {file = "protobuf-3.19.4-cp39-cp39-win32.whl", hash = "sha256:84123274d982b9e248a143dadd1b9815049f4477dc783bf84efe6250eb4b836a"}, + {file = "protobuf-3.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:3112b58aac3bac9c8be2b60a9daf6b558ca3f7681c130dcdd788ade7c9ffbdca"}, + {file = "protobuf-3.19.4-py2.py3-none-any.whl", hash = "sha256:8961c3a78ebfcd000920c9060a262f082f29838682b1f7201889300c1fbe0616"}, + {file = "protobuf-3.19.4.tar.gz", hash = "sha256:9df0c10adf3e83015ced42a9a7bd64e13d06c4cf45c340d2c63020ea04499d0a"}, ] py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, @@ -610,12 +640,12 @@ typed-ast = [ {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, ] types-futures = [ - {file = "types-futures-3.3.7.tar.gz", hash = "sha256:d286db818fb67e3ce5c28acd9058c067329b91865acc443ac3cf91497fa36f05"}, - {file = "types_futures-3.3.7-py3-none-any.whl", hash = 
"sha256:67fcd373796c8b4fb94fdb9cb006718c34be306f292b1e1f1e01f980d3630be5"}, + {file = "types-futures-3.3.8.tar.gz", hash = "sha256:6fe8ccc2c2af7ef2fdd9bf73eab6d617074f09f30ad7d373510b4043d39c42de"}, + {file = "types_futures-3.3.8-py3-none-any.whl", hash = "sha256:d6e97ec51d56b96debfbf1dea32ebec22c1687f16d2547ea0a34b48db45df205"}, ] types-protobuf = [ - {file = "types-protobuf-3.19.6.tar.gz", hash = "sha256:ba586359dc80f09abbff7d143cedd5c86d2c6e376b46647c7aa2262741accba1"}, - {file = "types_protobuf-3.19.6-py3-none-any.whl", hash = "sha256:0d704eca8c7cf2cc9d8b52f63a5d03d7c39dff8f0915c23b3e90cb1563617e18"}, + {file = "types-protobuf-3.19.8.tar.gz", hash = "sha256:5ff1a5b7d0f36e3600ad1a3d4b55ba6c446cef2ef82d25f06a0aa43912345fb4"}, + {file = "types_protobuf-3.19.8-py3-none-any.whl", hash = "sha256:1364327ebfb4360b36bd62b55fb32f704a516c8c26d82bad566938a23e644eca"}, ] typing-extensions = [ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, diff --git a/pyproject.toml b/pyproject.toml index 122543b43..d3c03560a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,13 +21,15 @@ typing-extensions = "^4.0.1" black = "^21.12b0" grpcio-tools = "^1.43.0" isort = "^5.10.1" +maturin = "^0.12.6" mypy = "^0.931" mypy-protobuf = "^3.2.0" pytest = "^6.2.5" pytest-asyncio = "^0.17.2" [tool.poe.tasks] -build = ["gen-protos", "test"] +build = ["gen-protos", "build-bridge", "test"] +build-bridge = "python scripts/build-bridge.py" format = [{cmd = "black ."}, {cmd = "isort ."}] gen-protos = "python scripts/gen-protos.py" lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}, "lint-types"] diff --git a/scripts/build-bridge.py b/scripts/build-bridge.py new file mode 100644 index 000000000..8ece044e9 --- /dev/null +++ b/scripts/build-bridge.py @@ -0,0 +1,12 @@ +import subprocess +import sys +from pathlib import Path + +base_dir = Path(__file__).parent.parent + +if __name__ == "__main__": + 
print("Building Core bridge", file=sys.stderr) + subprocess.check_call( + ["maturin", "develop"], cwd=str(base_dir / "temporalio" / "bridge") + ) + print("Done", file=sys.stderr) diff --git a/temporalio/bridge/Cargo.lock b/temporalio/bridge/Cargo.lock new file mode 100644 index 000000000..352e39c13 --- /dev/null +++ b/temporalio/bridge/Cargo.lock @@ -0,0 +1,2330 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "0.7.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f" +dependencies = [ + "memchr", +] + +[[package]] +name = "ansi_term" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" +dependencies = [ + "winapi", +] + +[[package]] +name = "anyhow" +version = "1.0.53" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94a45b455c14666b85fc40a019e8ab9eb75e3a124e05494f5397122bc9eb06e0" + +[[package]] +name = "arc-swap" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f" + +[[package]] +name = "async-stream" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "171374e7e3b2504e0e5236e3b59260560f9fe94bfe9ac39ba5e4e929c5590625" +dependencies = [ + "async-stream-impl", + "futures-core", +] + +[[package]] +name = "async-stream-impl" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "648ed8c8d2ce5409ccd57453d9d1b214b342a0d69376a6feda1fd6cae3299308" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "async-trait" +version = "0.1.52" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "061a7acccaa286c011ddc30970520b98fa40e00c9d644633fb26b5fc63a265e3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "autocfg" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a" + +[[package]] +name = "backoff" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fe17f59a06fe8b87a6fc8bf53bb70b3aba76d7685f432487a68cd5552853625" +dependencies = [ + "getrandom", + "instant", + "rand", +] + +[[package]] +name = "base64" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bumpalo" +version = "3.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899" + +[[package]] +name = "bytes" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8" + +[[package]] +name = "cache-padded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1db59621ec70f09c5e9b597b220c7a2b43611f4710dc03ceb8748637775692c" + +[[package]] +name = "cc" +version = "1.0.72" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee" + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + +[[package]] +name = "core-foundation" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6888e10551bb93e424d8df1d07f1a8b4fceb0001a3a4b048bfc47554946f47b3" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" + +[[package]] +name = "crossbeam" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae5588f6b3c3cb05239e90bd110f257254aecd01e4635400391aeae07497845" +dependencies = [ + "cfg-if", + "crossbeam-channel", + "crossbeam-deque", + "crossbeam-epoch", + "crossbeam-queue", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-channel" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e54ea8bc3fb1ee042f5aace6e3c6e025d3874866da222930f70ce62aceba0bfa" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"97242a70df9b89a65d0b6df3c4bf5b9ce03c5b7309019777fbde37e7537f8762" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "lazy_static", + "memoffset", + "scopeguard", +] + +[[package]] +name = "crossbeam-queue" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b979d76c9fcb84dffc80a73f7290da0f83e4c95773494674cb44b76d13a7a110" +dependencies = [ + "cfg-if", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcae03edb34f947e64acdb1c33ec169824e20657e9ecb61cef6c8c74dcb8120" +dependencies = [ + "cfg-if", + "lazy_static", +] + +[[package]] +name = "ctor" +version = "0.1.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccc0a48a9b826acdf4028595adc9db92caea352f7af011a3034acd172a52a0aa" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "darling" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f2c43f534ea4b0b049015d00269734195e6d3f0f6635cb692251aca6f9f8b3c" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e91455b86830a1c21799d94524df0845183fa55bafd9aa137b01c7d1065fa36" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29b5acf0dea37a7f66f7b25d2c5e93fd46f8f6968b1a5d7a3e02e97768afc95a" +dependencies = [ + "darling_core", + "quote", + "syn", +] + +[[package]] +name = "dashmap" +version = "4.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e77a43b28d0668df09411cb0bc9a8c2adc40f9a048afe863e05fd43251e8e39c" +dependencies = [ + "cfg-if", + "num_cpus", +] + +[[package]] 
+name = "dashmap" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b799062aaf67eb976af3bdca031ee6f846d2f0a5710ddbb0d2efee33f3cc4760" +dependencies = [ + "cfg-if", + "num_cpus", + "parking_lot", +] + +[[package]] +name = "derive_builder" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d13202debe11181040ae9063d739fa32cfcaaebe2275fe387703460ae2365b30" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66e616858f6187ed828df7c64a6d71720d83767a7f19740b2d1b6fe6327b36e5" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58a94ace95092c5acb1e97a7e846b310cfbd499652f72297da7493f618a98d73" +dependencies = [ + "derive_builder_core", + "syn", +] + +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn", +] + +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + +[[package]] +name = "downcast" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" + +[[package]] +name = "either" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" + +[[package]] +name = "enum_dispatch" +version = "0.3.7" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd53b3fde38a39a06b2e66dc282f3e86191e53bd04cc499929c15742beae3df8" +dependencies = [ + "once_cell", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "fastrand" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +dependencies = [ + "instant", +] + +[[package]] +name = "fixedbitset" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d" + +[[package]] +name = "fixedbitset" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "279fb028e20b3c4c320317955b77c5e0c9701f05a1d309905d6fc702cdc5053e" + +[[package]] +name = "float-cmp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" +dependencies = [ + "num-traits", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "form_urlencoded" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +dependencies = [ + "matches", + "percent-encoding", +] + +[[package]] +name = "fragile" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8da1b8f89c5b5a5b7e59405cfcf0bb9588e5ed19f0b57a4cd542bbba3f164a6d" + +[[package]] +name = "futures" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28560757fe2bb34e79f907794bb6b22ae8b0e5c669b638a1132f2592b19035b4" +dependencies = [ + "futures-channel", + "futures-core", + 
"futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3dda0b6588335f360afc675d0564c17a77a2bda81ca178a4b6081bd86c7f0b" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0c8ff0461b82559810cdccfde3215c3f373807f5e5232b71479bff7bb2583d7" + +[[package]] +name = "futures-executor" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29d6d2ff5bb10fb95c85b8ce46538a2e5f5e7fdc755623a7d4529ab8a4ed9d2a" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1f9d34af5a1aac6fb380f735fe510746c38067c5bf16c7fd250280503c971b2" + +[[package]] +name = "futures-macro" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbd947adfffb0efc70599b3ddcf7b5597bb5fa9e245eb99f62b3a5f7bb8bd3c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "futures-retry" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fde5a672a61f96552aa5ed9fd9c81c3fbdae4be9b1e205d6eaf17c83705adc0f" +dependencies = [ + "futures", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "futures-sink" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3055baccb68d74ff6480350f8d6eb8fcfa3aa11bdc1a1ae3afdd0514617d508" + +[[package]] +name = "futures-task" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ee7c6485c30167ce4dfb83ac568a849fe53274c831081476ee13e0dce1aad72" + +[[package]] 
+name = "futures-util" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b5cf40b47a271f77a8b1bec03ca09044d99d2372c0de244e66430761127164" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "getrandom" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418d37c8b1d42553c93648be529cb70f920d3baf8ef469b74b9638df426e0b4c" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "ghost" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a5bcf1bbeab73aa4cf2fde60a846858dc036163c7c33bec309f8d17de785479" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "h2" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9f1f717ddc7b2ba36df7e871fd88db79326551d3d6f1fc406fbfd28b582ff8e" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e" +dependencies = [ + "ahash", +] + +[[package]] +name = "heck" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c" +dependencies = [ + "unicode-segmentation", +] + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "http" +version = "0.2.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31f4c6746584866f0feabcc69893c5b51beef3831656a968ed7ae254cdc4fd03" +dependencies = [ + "bytes", + "fnv", + "itoa 1.0.1", +] + +[[package]] +name = "http-body" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ff4f84919677303da5f147645dbea6b1881f368d03ac84e1dc09031ebd7b2c6" +dependencies = [ + "bytes", + "http", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acd94fdbe1d4ff688b67b04eee2e17bd50995534a61539e45adfefb45e5e5503" + +[[package]] +name = "httpdate" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" + +[[package]] +name = "hyper" +version = "0.14.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7ec3e62bdc98a2f0393a5048e4c30ef659440ea6e0e572965103e72bd836f55" +dependencies = [ + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "httparse", + "httpdate", + "itoa 0.4.8", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", + "want", +] + +[[package]] +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" 
+dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "indexmap" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282a6247722caba404c065016bbfa522806e51714c34f5dfc3e4a3a46fcb4223" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "indoc" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47741a8bc60fb26eb8d6e0238bbb26d8575ff623fdc97b1a2c00c050b9684ed8" +dependencies = [ + "indoc-impl", + "proc-macro-hack", +] + +[[package]] +name = "indoc-impl" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce046d161f000fffde5f432a0d034d0341dc152643b2598ed5bfce44c4f3a8f0" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote", + "syn", + "unindent", +] + +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "inventory" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0eb5160c60ba1e809707918ee329adb99d222888155835c6feedba19f6c3fd4" +dependencies = [ + "ctor", + "ghost", + "inventory-impl", +] + +[[package]] +name = "inventory-impl" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e41b53715c6f0c4be49510bb82dee2c1e51c8586d885abe65396e82ed518548" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "itertools" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + +[[package]] +name = "itoa" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35" + +[[package]] +name = "js-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a38fc24e30fd564ce974c02bf1d337caddff65be6cc4735a1f7eab22a7440f04" +dependencies = [ + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.116" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "565dbd88872dbe4cc8a46e527f26483c1d1f7afa6b884a3bd6cd893d4f98da74" + +[[package]] +name = "lock_api" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88943dd7ef4a2e5a4bfa2753aaab3013e34ce2533d1996fb18ef591e315e2b3b" +dependencies = [ + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710" +dependencies = [ + "cfg-if", +] + +[[package]] +name = "lru" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "274353858935c992b13c0ca408752e2121da852d07dec7ce5f108c77dfa14d1f" +dependencies = [ + "hashbrown", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata", +] + +[[package]] +name = "matches" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" + +[[package]] +name = "memchr" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a" + +[[package]] +name = "memoffset" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce" +dependencies = [ + "autocfg", +] + +[[package]] +name = "mio" +version = "0.7.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8067b404fe97c70829f082dec8bcf4f71225d7eaea1d8645349cb76fa06205cc" +dependencies = [ + "libc", + "log", + "miow", + "ntapi", + "winapi", +] + +[[package]] +name = "miow" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9f1c5b025cda876f66ef43a113f91ebc9f4ccef34843000e0adf6ebbab84e21" +dependencies = [ + "winapi", +] + +[[package]] +name = "mockall" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d4d70639a72f972725db16350db56da68266ca368b2a1fe26724a903ad3d6b8" +dependencies = [ + "cfg-if", + "downcast", + "fragile", + "lazy_static", + "mockall_derive", + "predicates", + "predicates-tree", +] + +[[package]] +name = "mockall_derive" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79ef208208a0dea3f72221e26e904cdc6db2e481d9ade89081ddd494f1dbaa6b" +dependencies = [ + "cfg-if", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "normalize-line-endings" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" + +[[package]] +name = "ntapi" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f6bb902e437b6d86e03cce10a7e2af662292c5dfef23b65899ea3ac9354ad44" +dependencies = [ + "winapi", +] + +[[package]] +name = "num-traits" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290" +dependencies = [ + "autocfg", +] + +[[package]] +name = "num_cpus" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1" +dependencies = [ + "hermit-abi", + "libc", +] + +[[package]] +name = "once_cell" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da32515d9f6e6e489d7bc9d84c71b060db7247dc035bbe44eac88cf87486d8d5" + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "opentelemetry" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1cf9b1c4e9a6c4de793c632496fa490bdc0e1eea73f0c91394f7b6990935d22" +dependencies = [ + "async-trait", + "crossbeam-channel", + "dashmap 4.0.2", + "fnv", + "futures", + "js-sys", + "lazy_static", + "percent-encoding", + "pin-project", + "rand", + "thiserror", + "tokio", + "tokio-stream", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f19d4b43842433c420c548c985d158f5628bba5b518e0be64627926d19889992" +dependencies = [ + "async-trait", + "futures", + "http", + "opentelemetry", + "prost 0.8.0", + "thiserror", + "tokio", + "tonic 0.5.2", + "tonic-build 0.5.2", +] + +[[package]] +name = 
"opentelemetry-prometheus" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ee9c06c1366665e7d4dba6540a42ea48900a9c92dc5b963f3ae05fbba76dc63" +dependencies = [ + "opentelemetry", + "prometheus", + "protobuf", +] + +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216" +dependencies = [ + "cfg-if", + "instant", + "libc", + "redox_syscall", + "smallvec", + "winapi", +] + +[[package]] +name = "paste" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45ca20c77d80be666aef2b45486da86238fabe33e38306bd3118fe4af33fa880" +dependencies = [ + "paste-impl", + "proc-macro-hack", +] + +[[package]] +name = "paste-impl" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d95a7db200b97ef370c8e6de0088252f7e0dfff7d047a28528e47456c0fc98b6" +dependencies = [ + "proc-macro-hack", +] + +[[package]] +name = "percent-encoding" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" + +[[package]] +name = "petgraph" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7" +dependencies = [ + "fixedbitset 0.2.0", + "indexmap", +] + +[[package]] +name = "petgraph" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a13a2fa9d0b63e5f22328828741e523766fff0ee9e779316902290dff3f824f" 
+dependencies = [ + "fixedbitset 0.4.1", + "indexmap", +] + +[[package]] +name = "pin-project" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +dependencies = [ + "pin-project-internal", +] + +[[package]] +name = "pin-project-internal" +version = "1.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e280fbe77cc62c91527259e9442153f4688736748d24660126286329742b4c6c" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "ppv-lite86" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872" + +[[package]] +name = "predicates" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5aab5be6e4732b473071984b3164dbbfb7a3674d30ea5ff44410b6bcd960c3c" +dependencies = [ + "difflib", + "float-cmp", + "itertools", + "normalize-line-endings", + "predicates-core", + "regex", +] + +[[package]] +name = "predicates-core" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da1c2388b1513e1b605fcec39a95e0a9e8ef088f71443ef37099fa9ae6673fcb" + +[[package]] +name = "predicates-tree" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d86de6de25020a36c6d3643a86d9a6a9f552107c0559c60ea03551b5e16c032" +dependencies = [ + "predicates-core", + "termtree", +] + +[[package]] +name = 
"proc-macro-hack" +version = "0.5.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" + +[[package]] +name = "proc-macro2" +version = "1.0.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7342d5883fbccae1cc37a2353b09c87c9b0f3afd73f5fb9bba687a1f733b029" +dependencies = [ + "unicode-xid", +] + +[[package]] +name = "prometheus" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5986aa8d62380092d2f50f8b1cdba9cb9b6731ffd4b25b51fd126b6c3e05b99c" +dependencies = [ + "cfg-if", + "fnv", + "lazy_static", + "memchr", + "parking_lot", + "protobuf", + "thiserror", +] + +[[package]] +name = "prost" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de5e2533f59d08fcf364fd374ebda0692a70bd6d7e66ef97f306f45c6c5d8020" +dependencies = [ + "bytes", + "prost-derive 0.8.0", +] + +[[package]] +name = "prost" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "444879275cb4fd84958b1a1d5420d15e6fcf7c235fe47f053c9c2a80aceb6001" +dependencies = [ + "bytes", + "prost-derive 0.9.0", +] + +[[package]] +name = "prost-build" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "355f634b43cdd80724ee7848f95770e7e70eefa6dcf14fea676216573b8fd603" +dependencies = [ + "bytes", + "heck", + "itertools", + "log", + "multimap", + "petgraph 0.5.1", + "prost 0.8.0", + "prost-types 0.8.0", + "tempfile", + "which", +] + +[[package]] +name = "prost-build" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62941722fb675d463659e49c4f3fe1fe792ff24fe5bbaa9c08cd3b98a1c354f5" +dependencies = [ + "bytes", + "heck", + "itertools", + "lazy_static", + "log", + "multimap", + "petgraph 0.6.0", + "prost 0.9.0", + "prost-types 0.9.0", + "regex", + "tempfile", + "which", 
+] + +[[package]] +name = "prost-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "600d2f334aa05acb02a755e217ef1ab6dea4d51b58b7846588b747edec04efba" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-derive" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9cc1a3263e07e0bf68e96268f37665207b49560d98739662cdfaae215c720fe" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "prost-types" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "603bbd6394701d13f3f25aada59c7de9d35a6a5887cfc156181234a44002771b" +dependencies = [ + "bytes", + "prost 0.8.0", +] + +[[package]] +name = "prost-types" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "534b7a0e836e3c482d2693070f982e39e7611da9695d4d1f5a4b186b51faef0a" +dependencies = [ + "bytes", + "prost 0.9.0", +] + +[[package]] +name = "protobuf" +version = "2.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00e95f7417529a121d3c1d0bd831fd86cc5d5bf7b77ae1449259db3d5ff8b3e7" + +[[package]] +name = "pyo3" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7cf01dbf1c05af0a14c7779ed6f3aa9deac9c3419606ac9de537a2d649005720" +dependencies = [ + "cfg-if", + "indoc", + "libc", + "parking_lot", + "paste", + "pyo3-build-config", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-asyncio" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0897c7e36110a32b726b975359b2bbe90c37fcf1266046d3b1c08c616a47a886" +dependencies = [ + "futures", + "inventory", + "once_cell", + "pin-project-lite", + "pyo3", + "tokio", +] + +[[package]] +name = "pyo3-build-config" +version = "0.15.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf9e4d128bfbddc898ad3409900080d8d5095c379632fbbfbb9c8cfb1fb852b" +dependencies = [ + "once_cell", +] + +[[package]] +name = "pyo3-macros" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67701eb32b1f9a9722b4bc54b548ff9d7ebfded011c12daece7b9063be1fd755" +dependencies = [ + "pyo3-macros-backend", + "quote", + "syn", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f44f09e825ee49a105f2c7b23ebee50886a9aee0746f4dd5a704138a64b0218a" +dependencies = [ + "proc-macro2", + "pyo3-build-config", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "864d3e96a899863136fc6e99f3d7cae289dafe43bf2c5ac19b70df7210c0a145" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e7573632e6454cf6b99d7aac4ccca54be06da05aca2ef7423d22d27d4d4bcd8" +dependencies = [ + "libc", + "rand_chacha", + "rand_core", + "rand_hc", +] + +[[package]] +name = "rand_chacha" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "rand_hc" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d51e9f596de227fda2ea6c84607f5558e196eeaf43c986b724ba4fb8fdf497e7" +dependencies = [ + "rand_core", +] + +[[package]] +name = "redox_syscall" 
+version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8383f39639269cde97d255a32bdb68c047337295414940c68bdd30c2e13203ff" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.6.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b" + +[[package]] +name = "remove_dir_all" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7" +dependencies = [ + "winapi", +] + +[[package]] +name = "ring" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin", + "untrusted", + "web-sys", + "winapi", +] + +[[package]] +name = "ringbuf" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c60f3923939c33e6c543ddbff14d0ee6a407fcd186d560be37282559616adf3" +dependencies = [ + "cache-padded", +] + +[[package]] +name = "rustc_version" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" +dependencies = [ + "semver", +] + +[[package]] +name = "rustfsm" +version = "0.1.0" +dependencies = [ + 
"rustfsm_procmacro", + "rustfsm_trait", +] + +[[package]] +name = "rustfsm_procmacro" +version = "0.1.0" +dependencies = [ + "derive_more", + "proc-macro2", + "quote", + "rustfsm_trait", + "syn", +] + +[[package]] +name = "rustfsm_trait" +version = "0.1.0" + +[[package]] +name = "rustls" +version = "0.19.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35edb675feee39aec9c99fa5ff985081995a06d594114ae14cbe797ad7b7a6d7" +dependencies = [ + "base64", + "log", + "ring", + "sct", + "webpki", +] + +[[package]] +name = "rustls-native-certs" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a07b7c1885bd8ed3831c289b7870b13ef46fe0e856d288c30d9cc17d75a2092" +dependencies = [ + "openssl-probe", + "rustls", + "schannel", + "security-framework", +] + +[[package]] +name = "ryu" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f" + +[[package]] +name = "schannel" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f05ba609c234e60bee0d547fe94a4c7e9da733d1c962cf6e59efa4cd9c8bc75" +dependencies = [ + "lazy_static", + "winapi", +] + +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + +[[package]] +name = "sct" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b362b83898e0e69f38515b82ee15aa80636befe47c3b6d3d89a911e78fc228ce" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = "security-framework" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fed7948b6c68acbb6e20c334f55ad635dc0f75506963de4464289fbd3b051ac" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + 
"libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a57321bf8bc2362081b2599912d2961fe899c0efadf1b4b2f8d48b3e253bb96c" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "semver" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "568a8e6258aa33c13358f81fd834adb854c6f7c9468520910a9b1e8fac068012" + +[[package]] +name = "serde" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.136" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.78" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d23c1ba4cf0efd44be32017709280b32d1cea5c3f1275c3b6d9e8bc54f758085" +dependencies = [ + "itoa 1.0.1", + "ryu", + "serde", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "signal-hook-registry" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e51e73328dc4ac0c7ccbda3a494dfa03df1de2f46018127f60c693f2648455b0" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9def91fd1e018fe007022791f865d0ccc9b3a0d5001e01aabb8b40e46000afb5" + +[[package]] +name = "slotmap" +version = "1.0.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1e08e261d0e8f5c43123b7adf3e4ca1690d655377ac93a03b2c9d3e98de1342" +dependencies = [ + "version_check", +] + +[[package]] +name = "smallvec" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" + +[[package]] +name = "socket2" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +dependencies = [ + "libc", + "winapi", +] + +[[package]] +name = "spin" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" + +[[package]] +name = "strsim" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" + +[[package]] +name = "syn" +version = "1.0.86" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a65b3f4ffa0092e9887669db0eae07941f023991ab58ea44da8fe8e2d511c6b" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "tempfile" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4" +dependencies = [ + "cfg-if", + "fastrand", + "libc", + "redox_syscall", + "remove_dir_all", + "winapi", +] + +[[package]] +name = "temporal-client" +version = "0.1.0" +dependencies = [ + "async-trait", + "backoff", + "derive_builder", + "derive_more", + "futures", + "futures-retry", + "http", + "mockall", + "opentelemetry", + "prost-types 0.9.0", + "temporal-sdk-core-protos", + "thiserror", + "tokio", + "tonic 0.6.2", + "tower", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "temporal-sdk-bridge" +version = "0.1.0" 
+dependencies = [ + "prost 0.9.0", + "prost-types 0.9.0", + "pyo3", + "pyo3-asyncio", + "temporal-client", + "temporal-sdk-core", + "temporal-sdk-core-api", + "temporal-sdk-core-protos", + "tokio", + "tonic 0.6.2", + "url", +] + +[[package]] +name = "temporal-sdk-core" +version = "0.1.0" +dependencies = [ + "anyhow", + "arc-swap", + "async-trait", + "base64", + "crossbeam", + "dashmap 5.0.0", + "derive_builder", + "derive_more", + "enum_dispatch", + "futures", + "http", + "hyper", + "itertools", + "lazy_static", + "log", + "lru", + "once_cell", + "opentelemetry", + "opentelemetry-otlp", + "opentelemetry-prometheus", + "parking_lot", + "prometheus", + "prost 0.9.0", + "prost-types 0.9.0", + "rand", + "ringbuf", + "rustfsm", + "serde", + "slotmap", + "temporal-client", + "temporal-sdk-core-api", + "temporal-sdk-core-protos", + "thiserror", + "tokio", + "tokio-stream", + "tokio-util", + "tonic 0.6.2", + "tonic-build 0.6.2", + "tracing", + "tracing-futures", + "tracing-opentelemetry", + "tracing-subscriber", + "url", + "uuid", +] + +[[package]] +name = "temporal-sdk-core-api" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "derive_builder", + "log", + "opentelemetry", + "prost-types 0.9.0", + "temporal-client", + "temporal-sdk-core-protos", + "thiserror", + "tonic 0.6.2", +] + +[[package]] +name = "temporal-sdk-core-protos" +version = "0.1.0" +dependencies = [ + "anyhow", + "base64", + "derive_more", + "prost 0.9.0", + "prost-types 0.9.0", + "rand", + "serde", + "serde_json", + "thiserror", + "tonic 0.6.2", + "tonic-build 0.6.2", + "uuid", +] + +[[package]] +name = "termtree" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "507e9898683b6c43a9aa55b64259b721b52ba226e0f3779137e50ad114a4c90b" + +[[package]] +name = "thiserror" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417" +dependencies = 
[ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "thread_local" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180" +dependencies = [ + "once_cell", +] + +[[package]] +name = "tinyvec" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c" + +[[package]] +name = "tokio" +version = "1.16.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c27a64b625de6d309e8c57716ba93021dccf1b3b5c97edd6d3dd2d2135afc0a" +dependencies = [ + "bytes", + "libc", + "memchr", + "mio", + "num_cpus", + "once_cell", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "tokio-macros", + "winapi", +] + +[[package]] +name = "tokio-io-timeout" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30b74022ada614a1b4834de765f9bb43877f910cc8ce4be40e89042c9223a8bf" +dependencies = [ + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-macros" +version = "1.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tokio-rustls" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "bc6844de72e57df1980054b38be3a9f4702aba4858be64dd700181a8a6d0e1b6" +dependencies = [ + "rustls", + "tokio", + "webpki", +] + +[[package]] +name = "tokio-stream" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50145484efff8818b5ccd256697f36863f587da82cf8b409c53adf1e840798e3" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e99e1983e5d376cd8eb4b66604d2e99e79f5bd988c3055891dcd8c9e2604cc0" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tonic" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "796c5e1cd49905e65dd8e700d4cb1dffcbfdb4fc9d017de08c1a537afd83627c" +dependencies = [ + "async-stream", + "async-trait", + "base64", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost 0.8.0", + "prost-derive 0.8.0", + "tokio", + "tokio-stream", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff08f4649d10a70ffa3522ca559031285d8e421d727ac85c60825761818f5d0a" +dependencies = [ + "async-stream", + "async-trait", + "base64", + "bytes", + "futures-core", + "futures-util", + "h2", + "http", + "http-body", + "hyper", + "hyper-timeout", + "percent-encoding", + "pin-project", + "prost 0.9.0", + "prost-derive 0.9.0", + "rustls-native-certs", + "tokio", + "tokio-rustls", + "tokio-stream", + "tokio-util", + "tower", + "tower-layer", + "tower-service", + "tracing", + "tracing-futures", +] + +[[package]] +name = "tonic-build" +version = "0.5.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "12b52d07035516c2b74337d2ac7746075e7dcae7643816c1b12c5ff8a7484c08" +dependencies = [ + "proc-macro2", + "prost-build 0.8.0", + "quote", + "syn", +] + +[[package]] +name = "tonic-build" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9403f1bafde247186684b230dc6f38b5cd514584e8bec1dd32514be4745fa757" +dependencies = [ + "proc-macro2", + "prost-build 0.9.0", + "quote", + "syn", +] + +[[package]] +name = "tower" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5651b5f6860a99bd1adb59dbfe1db8beb433e73709d9032b413a77e2fb7c066a" +dependencies = [ + "futures-core", + "futures-util", + "indexmap", + "pin-project", + "pin-project-lite", + "rand", + "slab", + "tokio", + "tokio-stream", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "tower-layer" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" + +[[package]] +name = "tower-service" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" + +[[package]] +name = "tracing" +version = "0.1.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "375a639232caf30edfc78e8d89b2d4c375515393e7af7e16f01cd96917fb2105" +dependencies = [ + "cfg-if", + "log", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f480b8f81512e825f337ad51e94c1eb5d3bbdf2b363dcd01e2b19a9ffe3f8e" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.21" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f4ed65637b8390770814083d20756f87bfa2c21bf2f110babdc5438351746e4" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "tracing-futures" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97d095ae15e245a057c8e8451bab9b3ee1e1f68e9ba2b4fbc18d0ac5237835f2" +dependencies = [ + "pin-project", + "tracing", +] + +[[package]] +name = "tracing-log" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6923477a48e41c1951f1999ef8bb5a3023eb723ceadafe78ffb65dc366761e3" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-opentelemetry" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ffbf13a0f8b054a4e59df3a173b818e9c6177c02789871f2073977fd0062076" +dependencies = [ + "opentelemetry", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5312f325fe3588e277415f5a6cca1f4ccad0f248c4cd5a4bd33032d7286abc22" +dependencies = [ + "ansi_term", + "lazy_static", + "matchers", + "parking_lot", + "regex", + "sharded-slab", + "smallvec", + "thread_local", + "tracing", + "tracing-core", + "tracing-log", +] + +[[package]] +name = "try-lock" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59547bce71d9c38b83d9c0e92b6066c4253371f15005def0c30d9657f50c7642" + +[[package]] +name = "unicode-bidi" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f" + +[[package]] +name = "unicode-normalization" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unicode-segmentation" +version = "1.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8895849a949e7845e06bd6dc1aa51731a103c42707010a5b591c0038fb73385b" + +[[package]] +name = "unicode-xid" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3" + +[[package]] +name = "unindent" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f14ee04d9415b52b3aeab06258a3f07093182b88ba0f9b8d203f211a7a7d41c7" + +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + +[[package]] +name = "url" +version = "2.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +dependencies = [ + "form_urlencoded", + "idna", + "matches", + "percent-encoding", +] + +[[package]] +name = "uuid" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" +dependencies = [ + "getrandom", +] + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "want" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ce8a968cb1cd110d136ff8b819a556d6fb6d919363c61534f6860c7eb172ba0" +dependencies = [ + "log", + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.10.2+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6" + +[[package]] +name = "wasm-bindgen" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25f1af7423d8588a3d840681122e72e6a24ddbcb3f0ec385cac0d12d24256c06" +dependencies = [ + "cfg-if", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b21c0df030f5a177f3cba22e9bc4322695ec43e7257d865302900290bcdedca" +dependencies = [ + "bumpalo", + "lazy_static", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f4203d69e40a52ee523b2529a773d5ffc1dc0071801c87b3d270b471b80ed01" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa8a30d46208db204854cadbb5d4baf5fcf8071ba5bf48190c3e59937962ebc" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.79" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d958d035c4438e28c70e4321a2911302f10135ce78a9c7834c0cab4123d06a2" + +[[package]] +name = "web-sys" +version = "0.3.56" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c060b319f29dd25724f09a2ba1418f142f539b2be99fbf4d2d5a8f7330afb8eb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki" +version = "0.21.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e38c0608262c46d4a56202ebabdeb094cef7e560ca7a226c6bf055188aa4ea" +dependencies = [ + "ring", + "untrusted", +] + +[[package]] +name = 
"which" +version = "4.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a5a7e487e921cf220206864a94a89b6c6905bfc19f1057fa26a4cb360e5c1d2" +dependencies = [ + "either", + "lazy_static", + "libc", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" diff --git a/temporalio/bridge/Cargo.toml b/temporalio/bridge/Cargo.toml new file mode 100644 index 000000000..5edd55cf9 --- /dev/null +++ b/temporalio/bridge/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "temporal-sdk-bridge" +version = "0.1.0" +edition = "2021" + +[lib] +name = "temporal_sdk_bridge" +crate-type = ["cdylib"] + +[dependencies] +prost = "0.9" +prost-types = "0.9" +pyo3 = { version = "0.15", features = ["extension-module"] } +pyo3-asyncio = { version = "0.15", features = ["tokio-runtime"] } +temporal-client = { version = "0.1.0", path = "./sdk-core/client" } +temporal-sdk-core = { version = "0.1.0", path = "./sdk-core/core" } +temporal-sdk-core-api = { version = "0.1.0", path = "./sdk-core/core-api" } +temporal-sdk-core-protos = { version = "0.1.0", path = "./sdk-core/sdk-core-protos" } +tokio = "1.15" +tonic = "0.6" +url = "2.2" \ No newline at end of file diff --git a/temporalio/bridge/client.py b/temporalio/bridge/client.py new file mode 100644 index 000000000..189a9c595 --- /dev/null +++ 
b/temporalio/bridge/client.py @@ -0,0 +1,80 @@ +import os +import socket +from dataclasses import dataclass, field +from typing import Mapping, Optional, Type, TypeVar + +import google.protobuf.message +import temporal_sdk_bridge + +import temporalio.api.workflowservice.v1 + + +@dataclass +class ClientTlsConfig: + server_root_ca_cert: Optional[bytes] + domain: Optional[str] + client_cert: Optional[bytes] + client_private_key: Optional[bytes] + + +@dataclass +class ClientRetryConfig: + initial_interval_millis: int = 100 + randomization_factor: float = 0.2 + multiplier: float = 1.5 + max_interval_millis: int = 5000 + max_elapsed_time_millis: Optional[int] = 10000 + max_retries: int = 10 + + +@dataclass +class ClientOptions: + target_url: str + client_name: str = "temporal-python" + # TODO(cretz): Take from importlib ref https://stackoverflow.com/a/54869712 + client_version: str = "0.1.0" + static_headers: Mapping[str, str] = field(default_factory=dict) + identity: str = f"{os.getpid()}@{socket.gethostname()}" + # TODO(cretz): Use proper name/version + worker_binary_id: str = "python-sdk@0.1.0" + tls_config: Optional[ClientTlsConfig] = None + retry_config: Optional[ClientRetryConfig] = None + + +ProtoMessage = TypeVar("ProtoMessage", bound=google.protobuf.message.Message) + + +class Client: + @staticmethod + async def connect(opts: ClientOptions) -> "Client": + return Client(await temporal_sdk_bridge.new_client(opts)) + + _ref: temporal_sdk_bridge.ClientRef + + def __init__(self, ref: temporal_sdk_bridge.ClientRef): + self._ref = ref + + async def start_workflow_execution( + self, + req: temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest, + *, + retry: bool = False, + ) -> temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse: + return await self.__rpc_call( + "start_workflow_execution", + req, + temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse, + retry=retry, + ) + + async def __rpc_call( + self, + rpc: str, + req: 
google.protobuf.message.Message, + resp_type: Type[ProtoMessage], + *, + retry: bool = False, + ) -> ProtoMessage: + resp = resp_type() + resp.ParseFromString(await self._ref.call(rpc, retry, req.SerializeToString())) + return resp diff --git a/temporalio/bridge/sdk-bridge/Cargo.toml b/temporalio/bridge/sdk-bridge/Cargo.toml deleted file mode 100644 index 153a20074..000000000 --- a/temporalio/bridge/sdk-bridge/Cargo.toml +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "temporal-sdk-core-bridge" -version = "0.1.0" -edition = "2021" - -[lib] -name = "temporal_sdk_core_bridge" -crate-type = ["cdylib"] - -[dependencies] -tokio = "1.15" -prost = "0.9" -prost-types = "0.9" -temporal-sdk-core = { version = "0.1.0", path = "../sdk-core/core" } -temporal-sdk-core-api = { version = "0.1.0", path = "../sdk-core/core-api" } -temporal-sdk-core-protos = { version = "0.1.0", path = "../sdk-core/sdk-core-protos" } -pyo3 = { version = "0.15", features = ["extension-module"] } -pyo3-asyncio = { version = "0.15", features = ["tokio-runtime"] } \ No newline at end of file diff --git a/temporalio/bridge/src/lib.rs b/temporalio/bridge/src/lib.rs new file mode 100644 index 000000000..a064232da --- /dev/null +++ b/temporalio/bridge/src/lib.rs @@ -0,0 +1,210 @@ +use pyo3::exceptions::{PyRuntimeError, PyValueError}; +use pyo3::prelude::*; +use std::collections::HashMap; +use std::time::Duration; +use tonic; + +#[pymodule] +fn temporal_sdk_bridge(_py: Python, m: &PyModule) -> PyResult<()> { + m.add_class::()?; + m.add_function(wrap_pyfunction!(new_client, m)?)?; + Ok(()) +} + +#[pyclass] +pub struct ClientRef { + retry_client: std::sync::Arc>, +} + +#[derive(FromPyObject)] +pub struct ClientOptions { + target_url: String, + client_name: String, + client_version: String, + static_headers: HashMap, + identity: String, + worker_binary_id: String, + tls_config: Option, + retry_config: Option, +} + +#[derive(FromPyObject)] +pub struct ClientTlsConfig { + server_root_ca_cert: Option>, + domain: 
Option, + client_cert: Option>, + client_private_key: Option>, +} + +#[derive(FromPyObject)] +pub struct ClientRetryConfig { + pub initial_interval_millis: u64, + pub randomization_factor: f64, + pub multiplier: f64, + pub max_interval_millis: u64, + pub max_elapsed_time_millis: Option, + pub max_retries: usize, +} + +#[pyfunction] +fn new_client(py: Python, opts: ClientOptions) -> PyResult<&PyAny> { + // TODO(cretz): Add metrics_meter? + let opts: temporal_client::ServerGatewayOptions = opts.try_into()?; + pyo3_asyncio::tokio::future_into_py(py, async move { + Ok(ClientRef { + retry_client: std::sync::Arc::new(opts.connect(None).await.map_err(|err| { + PyRuntimeError::new_err(format!("Failed client connect: {}", err)) + })?), + }) + }) +} + +#[pymethods] +impl ClientRef { + fn call<'p>( + &self, + py: Python<'p>, + rpc: String, + retry: bool, + req: Vec, + ) -> PyResult<&'p PyAny> { + let retry_client = self.retry_client.clone(); + pyo3_asyncio::tokio::future_into_py(py, async move { + let bytes = match rpc.as_str() { + "get_workflow_execution_history" => { + rpc_call!(retry_client, retry, get_workflow_execution_history, req) + } + "start_workflow_execution" => { + rpc_call!(retry_client, retry, start_workflow_execution, req) + } + _ => return Err(PyValueError::new_err(format!("Unknown RPC call {}", rpc))), + }?; + let bytes: &[u8] = &bytes; + Ok(Python::with_gil(|py| bytes.into_py(py))) + }) + } +} + +fn rpc_req

(bytes: Vec) -> PyResult> +where + P: prost::Message, + P: Default, +{ + let proto = P::decode(&*bytes) + .map_err(|err| PyValueError::new_err(format!("Invalid proto: {}", err)))?; + Ok(tonic::Request::new(proto)) +} + +fn rpc_resp

(res: Result, tonic::Status>) -> PyResult> +where + P: prost::Message, + P: Default, +{ + match res { + Ok(resp) => Ok(resp.get_ref().encode_to_vec()), + // TODO(cretz): Better error struct here w/ all the details + Err(err) => Err(PyRuntimeError::new_err(format!("RPC failed: {}", err))), + } +} + +fn clone_tonic_req(req: &tonic::Request) -> tonic::Request { + tonic::Request::new(req.get_ref().clone()) +} + +#[macro_export] +macro_rules! rpc_call { + ($retry_client:ident, $retry:ident, $call_name:ident, $req:ident) => { + if $retry { + // TODO(cretz): I wouldn't have to clone this if call_with_retry + // allowed error types other than tonic statuses + let req = rpc_req($req)?; + let fact = || { + let req = clone_tonic_req(&req); + let mut raw_client = $retry_client.get_client().raw_client().clone(); + async move { raw_client.$call_name(req).await } + }; + rpc_resp( + $retry_client + .call_with_retry( + fact, + temporal_client::CallType::Normal, + stringify!($call_name), + ) + .await, + ) + } else { + let mut raw_client = $retry_client.get_client().raw_client().clone(); + rpc_resp(raw_client.$call_name(rpc_req($req)?).await) + } + }; +} + +impl TryFrom for temporal_client::ServerGatewayOptions { + type Error = PyErr; + + fn try_from(opts: ClientOptions) -> PyResult { + let mut gateway_opts = temporal_client::ServerGatewayOptionsBuilder::default(); + gateway_opts + .target_url( + url::Url::parse(&opts.target_url) + .map_err(|err| PyValueError::new_err(format!("invalid target URL: {}", err)))?, + ) + // TODO(cretz): Unneeded + .namespace("".to_string()) + .client_name(opts.client_name) + .client_version(opts.client_version) + .static_headers(opts.static_headers) + .identity(opts.identity) + .worker_binary_id(opts.worker_binary_id) + .retry_config( + opts.retry_config + .map_or(temporal_client::RetryConfig::default(), |c| c.into()), + ); + // Builder does not allow us to set option here, so we have to make + // a conditional to even call it + if let Some(tls_config) = 
opts.tls_config { + gateway_opts.tls_cfg(tls_config.try_into()?); + } + return gateway_opts + .build() + .map_err(|err| PyValueError::new_err(format!("Invalid client options: {}", err))); + } +} + +impl TryFrom for temporal_client::TlsConfig { + type Error = PyErr; + + fn try_from(conf: ClientTlsConfig) -> PyResult { + Ok(temporal_client::TlsConfig { + server_root_ca_cert: conf.server_root_ca_cert, + domain: conf.domain, + client_tls_config: match (conf.client_cert, conf.client_private_key) { + (None, None) => None, + (Some(client_cert), Some(client_private_key)) => { + Some(temporal_client::ClientTlsConfig { + client_cert, + client_private_key, + }) + } + _ => { + return Err(PyValueError::new_err( + "Must have both client cert and private key or neither", + )) + } + }, + }) + } +} + +impl From for temporal_client::RetryConfig { + fn from(conf: ClientRetryConfig) -> Self { + temporal_client::RetryConfig { + initial_interval: Duration::from_millis(conf.initial_interval_millis), + randomization_factor: conf.randomization_factor, + multiplier: conf.multiplier, + max_interval: Duration::from_millis(conf.max_interval_millis), + max_elapsed_time: conf.max_elapsed_time_millis.map(Duration::from_millis), + max_retries: conf.max_retries, + } + } +} diff --git a/temporalio/converter.py b/temporalio/converter.py index 1f1b62ded..1154fdf5f 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -247,7 +247,10 @@ def encoding(self) -> str: async def encode(self, value: Any) -> Optional[temporalio.api.common.v1.Payload]: """See base class.""" - if isinstance(value, google.protobuf.message.Message): + if ( + isinstance(value, google.protobuf.message.Message) + and value.DESCRIPTOR is not None + ): # We have to convert to dict then to JSON because MessageToJson does # not have a compact option removing spaces and newlines json_str = json.dumps( @@ -290,7 +293,10 @@ def encoding(self) -> str: async def encode(self, value: Any) -> 
Optional[temporalio.api.common.v1.Payload]: """See base class.""" - if isinstance(value, google.protobuf.message.Message): + if ( + isinstance(value, google.protobuf.message.Message) + and value.DESCRIPTOR is not None + ): return temporalio.api.common.v1.Payload( metadata={ "encoding": self.encoding.encode(), diff --git a/tests/bridge/__init__.py b/tests/bridge/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/bridge/test_client.py b/tests/bridge/test_client.py new file mode 100644 index 000000000..5c22b748c --- /dev/null +++ b/tests/bridge/test_client.py @@ -0,0 +1,27 @@ +import uuid + +import temporalio.api.common.v1 +import temporalio.api.taskqueue.v1 +import temporalio.api.workflowservice.v1 +import temporalio.bridge.client +import temporalio.converter + + +async def test_bridge_client(): + opts = temporalio.bridge.client.ClientOptions(target_url="http://localhost:7233") + client = await temporalio.bridge.client.Client.connect(opts) + task_queue = f"my-task-queue-{uuid.uuid4()}" + workflow_id = f"my-workflow-{uuid.uuid4()}" + resp = await client.start_workflow_execution( + temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest( + namespace="default", + workflow_id=workflow_id, + workflow_type=temporalio.api.common.v1.WorkflowType(name="my-workflow"), + task_queue=temporalio.api.taskqueue.v1.TaskQueue(name=task_queue), + input=temporalio.api.common.v1.Payloads( + payloads=await temporalio.converter.default().encode(["some string!"]) + ), + request_id=str(uuid.uuid4()), + ) + ) + print(f"Started workflow with run ID: {resp.run_id}") From 2f4dc57a18f87578a93c8510032fd60c6ce6847e Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Wed, 2 Feb 2022 17:29:27 -0600 Subject: [PATCH 12/15] More client work --- pyproject.toml | 2 +- temporalio/__init__.py | 1 + temporalio/bridge/client.py | 15 +- temporalio/client/__init__.py | 6 + temporalio/client/client.py | 122 ++++++++ temporalio/client/workflow_service.py | 279 ++++++++++++++++++ 
temporalio/common/__init__.py | 1 + temporalio/common/retry_policy.py | 18 ++ {tests/bridge => temporalio/util}/__init__.py | 0 temporalio/util/proto.py | 12 + tests/client/__init__.py | 0 .../test_workflow_service.py} | 9 +- 12 files changed, 445 insertions(+), 20 deletions(-) create mode 100644 temporalio/client/__init__.py create mode 100644 temporalio/client/client.py create mode 100644 temporalio/client/workflow_service.py create mode 100644 temporalio/common/__init__.py create mode 100644 temporalio/common/retry_policy.py rename {tests/bridge => temporalio/util}/__init__.py (100%) create mode 100644 temporalio/util/proto.py create mode 100644 tests/client/__init__.py rename tests/{bridge/test_client.py => client/test_workflow_service.py} (75%) diff --git a/pyproject.toml b/pyproject.toml index d3c03560a..6facdd052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ pytest = "^6.2.5" pytest-asyncio = "^0.17.2" [tool.poe.tasks] -build = ["gen-protos", "build-bridge", "test"] +build = ["gen-protos", "build-bridge"] build-bridge = "python scripts/build-bridge.py" format = [{cmd = "black ."}, {cmd = "isort ."}] gen-protos = "python scripts/gen-protos.py" diff --git a/temporalio/__init__.py b/temporalio/__init__.py index e69de29bb..ef70fa32a 100644 --- a/temporalio/__init__.py +++ b/temporalio/__init__.py @@ -0,0 +1 @@ +from .common import RetryPolicy \ No newline at end of file diff --git a/temporalio/bridge/client.py b/temporalio/bridge/client.py index 189a9c595..7490c7e3b 100644 --- a/temporalio/bridge/client.py +++ b/temporalio/bridge/client.py @@ -54,20 +54,7 @@ async def connect(opts: ClientOptions) -> "Client": def __init__(self, ref: temporal_sdk_bridge.ClientRef): self._ref = ref - async def start_workflow_execution( - self, - req: temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest, - *, - retry: bool = False, - ) -> temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse: - return await self.__rpc_call( - 
"start_workflow_execution", - req, - temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse, - retry=retry, - ) - - async def __rpc_call( + async def rpc_call( self, rpc: str, req: google.protobuf.message.Message, diff --git a/temporalio/client/__init__.py b/temporalio/client/__init__.py new file mode 100644 index 000000000..8437f65c9 --- /dev/null +++ b/temporalio/client/__init__.py @@ -0,0 +1,6 @@ +from .workflow_service import ( + WorkflowService, + WorkflowServiceCall, + WorkflowServiceRequest, + WorkflowServiceResponse, +) diff --git a/temporalio/client/client.py b/temporalio/client/client.py new file mode 100644 index 000000000..1adb6a102 --- /dev/null +++ b/temporalio/client/client.py @@ -0,0 +1,122 @@ + +from datetime import timedelta +from enum import Enum +import os +import socket +from typing import Any, Awaitable, Generic, Optional, TypeVar, Union, overload +import uuid +from temporalio.api.enums.v1.workflow_pb2 import WorkflowIdReusePolicy +import temporalio.client +import temporalio.api.common.v1 +import temporalio.api.enums.v1 +import temporalio.api.taskqueue.v1 +import temporalio.api.workflowservice.v1 +import temporalio.converter +import temporalio.util.proto +import temporalio + +class WorkflowIDReusePolicy(Enum): + ALLOW_DUPLICATE = temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE + ALLOW_DUPLICATE_FAILED_ONLY = temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY + REJECT_DUPLICATE = temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE + +class Client: + + @staticmethod + async def connect(target_url: str, *, namespace: str="default", data_converter: temporalio.converter.DataConverter = temporalio.converter.default()) -> "Client": + return Client(await temporalio.client.WorkflowService.connect(target_url), namespace=namespace, data_converter=data_converter) + + def __init__(self, service: 
temporalio.client.WorkflowService, *, namespace: str="default", data_converter: temporalio.converter.DataConverter = temporalio.converter.default()): + self._service = service + self._namespace = namespace + self._data_converter = data_converter + + @property + def service(self) -> temporalio.client.WorkflowService: + return self._service + + @property + def namespace(self) -> str: + return self._namespace + + @property + def data_converter(self) -> temporalio.converter.DataConverter: + return self._data_converter + + async def start_workflow( + self, + workflow: str, + *args: Any, + task_queue: str, + # TODO(cretz): Should we require this? + id: str = str(uuid.uuid4()), + execution_timeout: Optional[timedelta] = None, + run_timeout: Optional[timedelta] = None, + task_timeout: Optional[timedelta] = None, + identity: str = f"{os.getpid()}@{socket.gethostname()}", + id_reuse_policy: WorkflowIdReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: Optional[temporalio.RetryPolicy] = None, + cron_schedule: Optional[str] = None + # TODO(cretz): Signal with start + # start_signal: Optional[str] = None, + # start_signal_args: list[Any] = [], + # ... 
additional options omitted for brevity + ) -> temporalio.WorkflowHandle[Any]: + input = None + if len(args) > 0: + input = temporalio.api.common.v1.Payloads(payloads = await self._data_converter.encode(list(args))) + proto_retry_policy = None + if retry_policy is not None: + proto_retry_policy = retry_policy.to_proto() + req = temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest( + namespace=self._namespace, + workflow_id=id, + workflow_type=temporalio.api.common.v1.WorkflowType(name=workflow), + task_queue=temporalio.api.taskqueue.v1.TaskQueue(name=task_queue), + input=input, + workflow_execution_timeout=temporalio.util.proto.from_timedelta(execution_timeout), + workflow_run_timeout=temporalio.util.proto.from_timedelta(run_timeout), + workflow_task_timeout=temporalio.util.proto.from_timedelta(task_timeout), + identity=identity, + request_id=str(uuid.uuid4()), + workflow_id_reuse_policy = id_reuse_policy, + retry_policy = None if retry_policy is None else retry_policy.to_proto(), + cron_schedule: typing.Text = ..., + memo: typing.Optional[temporal.api.common.v1.message_pb2.Memo] = ..., + search_attributes: typing.Optional[temporal.api.common.v1.message_pb2.SearchAttributes] = ..., + header: typing.Optional[temporal.api.common.v1.message_pb2.Header] = ..., + ) + # TODO(cretz): The rest + pass + +T = TypeVar("T") + +class WorkflowHandle(Generic[T]): + def __init__( + self, + client: Client, + id: str, + *, + run_id: Optional[str] = None + ) -> None: + self._client = client + self._id = id + self._run_id = run_id + + async def result(self) -> T: + pass + + async def cancel(self): + pass + + async def describe(self) -> temporalio.WorkflowExecution: + pass + + async def query(self, name: str, *args: Any) -> Any: + pass + + async def signal(self, name: str, *args: Any): + pass + + async def terminate(self, *, reason: Optional[str] = None): + pass \ No newline at end of file diff --git a/temporalio/client/workflow_service.py 
b/temporalio/client/workflow_service.py new file mode 100644 index 000000000..19b2f9ea3 --- /dev/null +++ b/temporalio/client/workflow_service.py @@ -0,0 +1,279 @@ +from abc import ABC, abstractmethod +from typing import Generic, Type, TypeVar + +import google.protobuf.message + +import temporalio.api.workflowservice.v1 +import temporalio.bridge.client + +WorkflowServiceRequest = TypeVar( + "WorkflowServiceRequest", bound=google.protobuf.message.Message +) +WorkflowServiceResponse = TypeVar( + "WorkflowServiceResponse", bound=google.protobuf.message.Message +) + + +class WorkflowService(ABC): + @staticmethod + async def connect(target_url: str) -> "WorkflowService": + return await BridgeWorkflowService.connect(target_url=target_url) + + def __init__(self) -> None: + super().__init__() + + wsv1 = temporalio.api.workflowservice.v1 + + self.count_workflow_executions = self.__new_call( + "count_workflow_executions", + wsv1.CountWorkflowExecutionsRequest, + wsv1.CountWorkflowExecutionsResponse, + ) + self.deprecate_namespace = self.__new_call( + "deprecate_namespace", + wsv1.DeprecateNamespaceRequest, + wsv1.DeprecateNamespaceResponse, + ) + self.describe_namespace = self.__new_call( + "describe_namespace", + wsv1.DescribeNamespaceRequest, + wsv1.DescribeNamespaceResponse, + ) + self.describe_task_queue = self.__new_call( + "describe_task_queue", + wsv1.DescribeTaskQueueRequest, + wsv1.DescribeTaskQueueResponse, + ) + self.describe_workflow_execution = self.__new_call( + "describe_workflow_execution", + wsv1.DescribeWorkflowExecutionRequest, + wsv1.DescribeWorkflowExecutionResponse, + ) + self.get_cluster_info = self.__new_call( + "get_cluster_info", + wsv1.GetClusterInfoRequest, + wsv1.GetClusterInfoResponse, + ) + self.get_search_attributes = self.__new_call( + "get_search_attributes", + wsv1.GetSearchAttributesRequest, + wsv1.GetSearchAttributesResponse, + ) + self.get_workflow_execution_history = self.__new_call( + "get_workflow_execution_history", + 
wsv1.GetWorkflowExecutionHistoryRequest, + wsv1.GetWorkflowExecutionHistoryResponse, + ) + self.list_archived_workflow_executions = self.__new_call( + "list_archived_workflow_executions", + wsv1.ListArchivedWorkflowExecutionsRequest, + wsv1.ListArchivedWorkflowExecutionsResponse, + ) + self.list_closed_workflow_executions = self.__new_call( + "list_closed_workflow_executions", + wsv1.ListClosedWorkflowExecutionsRequest, + wsv1.ListClosedWorkflowExecutionsResponse, + ) + self.list_namespaces = self.__new_call( + "list_namespaces", + wsv1.ListNamespacesRequest, + wsv1.ListNamespacesResponse, + ) + self.list_open_workflow_executions = self.__new_call( + "list_open_workflow_executions", + wsv1.ListOpenWorkflowExecutionsRequest, + wsv1.ListOpenWorkflowExecutionsResponse, + ) + self.list_task_queue_partitions = self.__new_call( + "list_task_queue_partitions", + wsv1.ListTaskQueuePartitionsRequest, + wsv1.ListTaskQueuePartitionsResponse, + ) + self.list_workflow_executions = self.__new_call( + "list_workflow_executions", + wsv1.ListWorkflowExecutionsRequest, + wsv1.ListWorkflowExecutionsResponse, + ) + self.poll_activity_task_queue = self.__new_call( + "poll_activity_task_queue", + wsv1.PollActivityTaskQueueRequest, + wsv1.PollActivityTaskQueueResponse, + ) + self.poll_workflow_task_queue = self.__new_call( + "poll_workflow_task_queue", + wsv1.PollWorkflowTaskQueueRequest, + wsv1.PollWorkflowTaskQueueResponse, + ) + self.query_workflow = self.__new_call( + "query_workflow", + wsv1.QueryWorkflowRequest, + wsv1.QueryWorkflowResponse, + ) + self.record_activity_task_heartbeat = self.__new_call( + "record_activity_task_heartbeat", + wsv1.RecordActivityTaskHeartbeatRequest, + wsv1.RecordActivityTaskHeartbeatResponse, + ) + self.record_activity_task_heartbeat_by_id = self.__new_call( + "record_activity_task_heartbeat_by_id", + wsv1.RecordActivityTaskHeartbeatByIdRequest, + wsv1.RecordActivityTaskHeartbeatByIdResponse, + ) + self.register_namespace = self.__new_call( + 
"register_namespace", + wsv1.RegisterNamespaceRequest, + wsv1.RegisterNamespaceResponse, + ) + self.request_cancel_workflow_execution = self.__new_call( + "request_cancel_workflow_execution", + wsv1.RequestCancelWorkflowExecutionRequest, + wsv1.RequestCancelWorkflowExecutionResponse, + ) + self.reset_sticky_task_queue = self.__new_call( + "reset_sticky_task_queue", + wsv1.ResetStickyTaskQueueRequest, + wsv1.ResetStickyTaskQueueResponse, + ) + self.reset_workflow_execution = self.__new_call( + "reset_workflow_execution", + wsv1.ResetWorkflowExecutionRequest, + wsv1.ResetWorkflowExecutionResponse, + ) + self.respond_activity_task_canceled = self.__new_call( + "respond_activity_task_canceled", + wsv1.RespondActivityTaskCanceledRequest, + wsv1.RespondActivityTaskCanceledResponse, + ) + self.respond_activity_task_canceled_by_id = self.__new_call( + "respond_activity_task_canceled_by_id", + wsv1.RespondActivityTaskCanceledByIdRequest, + wsv1.RespondActivityTaskCanceledByIdResponse, + ) + self.respond_activity_task_completed = self.__new_call( + "respond_activity_task_completed", + wsv1.RespondActivityTaskCompletedRequest, + wsv1.RespondActivityTaskCompletedResponse, + ) + self.respond_activity_task_completed_by_id = self.__new_call( + "respond_activity_task_completed_by_id", + wsv1.RespondActivityTaskCompletedByIdRequest, + wsv1.RespondActivityTaskCompletedByIdResponse, + ) + self.respond_activity_task_failed = self.__new_call( + "respond_activity_task_failed", + wsv1.RespondActivityTaskFailedRequest, + wsv1.RespondActivityTaskFailedResponse, + ) + self.respond_activity_task_failed_by_id = self.__new_call( + "respond_activity_task_failed_by_id", + wsv1.RespondActivityTaskFailedByIdRequest, + wsv1.RespondActivityTaskFailedByIdResponse, + ) + self.respond_query_task_completed = self.__new_call( + "respond_query_task_completed", + wsv1.RespondQueryTaskCompletedRequest, + wsv1.RespondQueryTaskCompletedResponse, + ) + self.respond_workflow_task_completed = self.__new_call( + 
"respond_workflow_task_completed", + wsv1.RespondWorkflowTaskCompletedRequest, + wsv1.RespondWorkflowTaskCompletedResponse, + ) + self.respond_workflow_task_failed = self.__new_call( + "respond_workflow_task_failed", + wsv1.RespondWorkflowTaskFailedRequest, + wsv1.RespondWorkflowTaskFailedResponse, + ) + self.scan_workflow_executions = self.__new_call( + "scan_workflow_executions", + wsv1.ScanWorkflowExecutionsRequest, + wsv1.ScanWorkflowExecutionsResponse, + ) + self.signal_with_start_workflow_execution = self.__new_call( + "signal_with_start_workflow_execution", + wsv1.SignalWithStartWorkflowExecutionRequest, + wsv1.SignalWithStartWorkflowExecutionResponse, + ) + self.signal_workflow_execution = self.__new_call( + "signal_workflow_execution", + wsv1.SignalWorkflowExecutionRequest, + wsv1.SignalWorkflowExecutionResponse, + ) + self.terminate_workflow_execution = self.__new_call( + "terminate_workflow_execution", + wsv1.TerminateWorkflowExecutionRequest, + wsv1.TerminateWorkflowExecutionResponse, + ) + self.reset_workflow_execution = self.__new_call( + "reset_workflow_execution", + wsv1.ResetWorkflowExecutionRequest, + wsv1.ResetWorkflowExecutionResponse, + ) + self.update_namespace = self.__new_call( + "update_namespace", + wsv1.UpdateNamespaceRequest, + wsv1.UpdateNamespaceResponse, + ) + + @abstractmethod + async def _rpc_call( + self, + rpc: str, + req: google.protobuf.message.Message, + resp_type: Type[WorkflowServiceResponse], + *, + retry: bool = False, + ) -> WorkflowServiceResponse: + raise NotImplementedError + + def __new_call( + self, + name: str, + req_type: Type[WorkflowServiceRequest], + resp_type: Type[WorkflowServiceResponse], + ) -> "WorkflowServiceCall[WorkflowServiceRequest, WorkflowServiceResponse]": + return WorkflowServiceCall(self, name, req_type, resp_type) + + +class WorkflowServiceCall(Generic[WorkflowServiceRequest, WorkflowServiceResponse]): + def __init__( + self, + service: WorkflowService, + name: str, + req_type: 
Type[WorkflowServiceRequest], + resp_type: Type[WorkflowServiceResponse], + ) -> None: + self.service = service + self.name = name + self.resp_type = resp_type + + async def __call__( + self, req: WorkflowServiceRequest, *, retry: bool = False + ) -> WorkflowServiceResponse: + return await self.service._rpc_call(self.name, req, self.resp_type, retry=retry) + + +class BridgeWorkflowService(WorkflowService): + @staticmethod + async def connect(target_url: str) -> "BridgeWorkflowService": + return BridgeWorkflowService( + await temporalio.bridge.client.Client.connect( + temporalio.bridge.client.ClientOptions(target_url=target_url) + ) + ) + + _bridge_client: temporalio.bridge.client.Client + + def __init__(self, bridge_client: temporalio.bridge.client.Client) -> None: + super().__init__() + self._bridge_client = bridge_client + + async def _rpc_call( + self, + rpc: str, + req: google.protobuf.message.Message, + resp_type: Type[WorkflowServiceResponse], + *, + retry: bool = False, + ) -> WorkflowServiceResponse: + return await self._bridge_client.rpc_call(rpc, req, resp_type, retry=retry) diff --git a/temporalio/common/__init__.py b/temporalio/common/__init__.py new file mode 100644 index 000000000..34150a0e5 --- /dev/null +++ b/temporalio/common/__init__.py @@ -0,0 +1 @@ +from .retry_policy import RetryPolicy diff --git a/temporalio/common/retry_policy.py b/temporalio/common/retry_policy.py new file mode 100644 index 000000000..8fd936444 --- /dev/null +++ b/temporalio/common/retry_policy.py @@ -0,0 +1,18 @@ +from dataclasses import dataclass +from datetime import timedelta +from typing import Optional + +import temporalio.api.common.v1 + + +@dataclass +class RetryPolicy: + initial_interval: timedelta + randomization_factor: float + multiplier: float + max_interval: timedelta + max_elapsed_time: Optional[timedelta] + max_retries: int + + def to_proto(self) -> temporalio.api.common.v1.RetryPolicy: + raise NotImplementedError \ No newline at end of file diff --git 
a/tests/bridge/__init__.py b/temporalio/util/__init__.py similarity index 100% rename from tests/bridge/__init__.py rename to temporalio/util/__init__.py diff --git a/temporalio/util/proto.py b/temporalio/util/proto.py new file mode 100644 index 000000000..ef4f1ca6b --- /dev/null +++ b/temporalio/util/proto.py @@ -0,0 +1,12 @@ + +from datetime import timedelta +from typing import Optional +import google.protobuf.duration_pb2 + + +def optional_timedelta_to_duration(d: Optional[timedelta]) -> Optional[google.protobuf.duration_pb2.Duration]: + if d is None: + return None + ret = google.protobuf.duration_pb2.Duration() + ret.FromTimedelta(d) + return ret \ No newline at end of file diff --git a/tests/client/__init__.py b/tests/client/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/bridge/test_client.py b/tests/client/test_workflow_service.py similarity index 75% rename from tests/bridge/test_client.py rename to tests/client/test_workflow_service.py index 5c22b748c..704241cfa 100644 --- a/tests/bridge/test_client.py +++ b/tests/client/test_workflow_service.py @@ -3,16 +3,15 @@ import temporalio.api.common.v1 import temporalio.api.taskqueue.v1 import temporalio.api.workflowservice.v1 -import temporalio.bridge.client +import temporalio.client import temporalio.converter -async def test_bridge_client(): - opts = temporalio.bridge.client.ClientOptions(target_url="http://localhost:7233") - client = await temporalio.bridge.client.Client.connect(opts) +async def test_simple(): + service = await temporalio.client.WorkflowService.connect("http://localhost:7233") task_queue = f"my-task-queue-{uuid.uuid4()}" workflow_id = f"my-workflow-{uuid.uuid4()}" - resp = await client.start_workflow_execution( + resp = await service.start_workflow_execution( temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest( namespace="default", workflow_id=workflow_id, From 5488fd6f3107dddd55229c3ac26cb9e1f1540d76 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: 
Thu, 3 Feb 2022 14:13:00 -0600 Subject: [PATCH 13/15] More client work --- scripts/gen-protos.py | 36 +- temporalio/__init__.py | 2 +- temporalio/client.py | 646 ++++++++++++++++++ temporalio/client/__init__.py | 6 - temporalio/client/client.py | 122 ---- temporalio/common/retry_policy.py | 4 +- temporalio/converter.py | 18 +- temporalio/failure.py | 61 ++ temporalio/util/__init__.py | 0 temporalio/util/proto.py | 12 - temporalio/{client => }/workflow_service.py | 83 +-- tests/api/test_grpc_stub.py | 2 +- tests/client/__init__.py | 0 tests/client/test_workflow_service.py | 26 - tests/test_client.py | 15 + .../{converter_test.py => test_converter.py} | 0 16 files changed, 802 insertions(+), 231 deletions(-) create mode 100644 temporalio/client.py delete mode 100644 temporalio/client/__init__.py delete mode 100644 temporalio/client/client.py create mode 100644 temporalio/failure.py delete mode 100644 temporalio/util/__init__.py delete mode 100644 temporalio/util/proto.py rename temporalio/{client => }/workflow_service.py (77%) delete mode 100644 tests/client/__init__.py delete mode 100644 tests/client/test_workflow_service.py create mode 100644 tests/test_client.py rename tests/{converter_test.py => test_converter.py} (100%) diff --git a/scripts/gen-protos.py b/scripts/gen-protos.py index eb6a3ea15..d01d2b9ea 100644 --- a/scripts/gen-protos.py +++ b/scripts/gen-protos.py @@ -18,15 +18,19 @@ api_out_dir = base_dir / "temporalio" / "api" sdk_out_dir = base_dir / "temporalio" / "bridge" / "proto" -fix_api_import = partial( - re.compile(r"from temporal\.api\.").sub, r"from temporalio.api." -) -fix_dependency_import = partial( - re.compile(r"from dependencies\.").sub, r"from temporalio.api.dependencies." -) -fix_sdk_import = partial( - re.compile(r"from temporal\.sdk\.core\.").sub, r"from temporalio.bridge.proto." 
-) +py_fixes = [ + partial(re.compile(r"from temporal\.api\.").sub, r"from temporalio.api."), + partial( + re.compile(r"from dependencies\.").sub, r"from temporalio.api.dependencies." + ), + partial( + re.compile(r"from temporal\.sdk\.core\.").sub, r"from temporalio.bridge.proto." + ), +] + +pyi_fixes = [ + partial(re.compile(r"temporal\.api\.").sub, r"temporalio.api."), +] find_message_re = re.compile(r"_sym_db\.RegisterMessage\(([^\)\.]+)\)") find_enum_re = re.compile(r"DESCRIPTOR\.enum_types_by_name\['([^']+)'\] =") @@ -46,18 +50,20 @@ def fix_generated_output(base_path: Path): for p in base_path.iterdir(): if p.is_dir(): fix_generated_output(p) - else: + elif p.suffix == ".py" or p.suffix == ".pyi": with p.open(encoding="utf8") as f: content = f.read() - content = fix_api_import(content) - content = fix_dependency_import(content) - content = fix_sdk_import(content) - # Only use .py files to determine imports, not pyi ones if p.suffix == ".py": + for fix in py_fixes: + content = fix(content) + # Only use .py files to determine imports, not pyi ones imports[p.stem] += find_message_re.findall(content) imports[p.stem] += find_enum_re.findall(content) imports[p.stem] += find_class_re.findall(content) imports[p.stem] += find_def_re.findall(content) + else: + for fix in pyi_fixes: + content = fix(content) with p.open("w") as f: f.write(content) # Write init @@ -84,7 +90,7 @@ def fix_generated_output(base_path: Path): *map(str, proto_paths), ] ) - # Apply import fixes before moving code + # Apply fixes before moving code fix_generated_output(temp_dir) # Move protos for p in (temp_dir / "temporal" / "api").iterdir(): diff --git a/temporalio/__init__.py b/temporalio/__init__.py index ef70fa32a..f4c2c89ad 100644 --- a/temporalio/__init__.py +++ b/temporalio/__init__.py @@ -1 +1 @@ -from .common import RetryPolicy \ No newline at end of file +from .common import RetryPolicy diff --git a/temporalio/client.py b/temporalio/client.py new file mode 100644 index 
000000000..77bdead7f --- /dev/null +++ b/temporalio/client.py @@ -0,0 +1,646 @@ +import logging +import os +import socket +import uuid +from dataclasses import dataclass +from datetime import timedelta +from enum import IntEnum +from typing import Any, Generic, Iterable, Mapping, Optional, TypeVar, Union, cast + +import temporalio +import temporalio.api.common.v1 +import temporalio.api.enums.v1 +import temporalio.api.failure.v1 +import temporalio.api.history.v1 +import temporalio.api.taskqueue.v1 +import temporalio.api.workflowservice.v1 +import temporalio.converter +import temporalio.failure +import temporalio.workflow_service + +logger = logging.getLogger(__name__) + + +class WorkflowIDReusePolicy(IntEnum): + ALLOW_DUPLICATE = int( + temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE + ) + ALLOW_DUPLICATE_FAILED_ONLY = int( + temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY + ) + REJECT_DUPLICATE = int( + temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE + ) + + +class WorkflowQueryRejectCondition(IntEnum): + NONE = int(temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NONE) + NOT_OPEN = int( + temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_OPEN + ) + NOT_COMPLETED_CLEANLY = int( + temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_COMPLETED_CLEANLY + ) + + +class WorkflowExecutionStatus(IntEnum): + RUNNING = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING + ) + COMPLETED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED + ) + FAILED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED + ) + CANCELED = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CANCELED + ) + TERMINATED = int( + 
temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TERMINATED + ) + CONTINUED_AS_NEW = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW + ) + TIMED_OUT = int( + temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TIMED_OUT + ) + + +class Client: + @staticmethod + async def connect( + target_url: str, + *, + namespace: str = "default", + identity: str = f"{os.getpid()}@{socket.gethostname()}", + data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), + interceptors: Iterable["Interceptor"] = [], + workflow_query_reject_condition: Optional[WorkflowQueryRejectCondition] = None, + ) -> "Client": + return Client( + await temporalio.workflow_service.WorkflowService.connect(target_url), + namespace=namespace, + identity=identity, + data_converter=data_converter, + interceptors=interceptors, + workflow_query_reject_condition=workflow_query_reject_condition, + ) + + def __init__( + self, + service: temporalio.workflow_service.WorkflowService, + *, + namespace: str = "default", + identity: str = f"{os.getpid()}@{socket.gethostname()}", + data_converter: temporalio.converter.DataConverter = temporalio.converter.default(), + interceptors: Iterable["Interceptor"] = [], + workflow_query_reject_condition: Optional[WorkflowQueryRejectCondition] = None, + ): + self._service = service + self._namespace = namespace + self._identity = identity + self._data_converter = data_converter + self._interceptors = interceptors + self._workflow_query_reject_condition = workflow_query_reject_condition + + # Iterate over interceptors in reverse building the impl + self._impl: OutboundInterceptor = _ClientImpl(self) + for interceptor in reversed(list(interceptors)): + self._impl = interceptor.intercept_client(self._impl) + + @property + def service(self) -> temporalio.workflow_service.WorkflowService: + return self._service + + @property + def namespace(self) -> str: + return 
self._namespace + + @property + def identity(self) -> str: + return self._identity + + @property + def data_converter(self) -> temporalio.converter.DataConverter: + return self._data_converter + + async def start_workflow( + self, + workflow: str, + *args: Any, + id: str, + task_queue: str, + execution_timeout: Optional[timedelta] = None, + run_timeout: Optional[timedelta] = None, + task_timeout: Optional[timedelta] = None, + id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: Optional[temporalio.RetryPolicy] = None, + cron_schedule: str = "", + memo: Optional[Mapping[str, Any]] = None, + search_attributes: Optional[Mapping[str, Any]] = None, + header: Optional[Mapping[str, Any]] = None, + start_signal: Optional[str] = None, + start_signal_args: Iterable[Any] = [], + ) -> "WorkflowHandle[Any]": + return await self._impl.start_workflow( + StartWorkflowInput( + workflow=workflow, + args=args, + id=id, + task_queue=task_queue, + execution_timeout=execution_timeout, + run_timeout=run_timeout, + task_timeout=task_timeout, + id_reuse_policy=id_reuse_policy, + retry_policy=retry_policy, + cron_schedule=cron_schedule, + memo=memo, + search_attributes=search_attributes, + header=header, + start_signal=start_signal, + start_signal_args=start_signal_args, + ) + ) + + async def execute_workflow( + self, + workflow: str, + *args: Any, + id: str, + task_queue: str, + execution_timeout: Optional[timedelta] = None, + run_timeout: Optional[timedelta] = None, + task_timeout: Optional[timedelta] = None, + id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, + retry_policy: Optional[temporalio.RetryPolicy] = None, + cron_schedule: str = "", + memo: Optional[Mapping[str, Any]] = None, + search_attributes: Optional[Mapping[str, Any]] = None, + header: Optional[Mapping[str, Any]] = None, + start_signal: Optional[str] = None, + start_signal_args: list[Any] = [], + ) -> Any: + return await ( + await self.start_workflow( 
+ workflow, + *args, + task_queue=task_queue, + id=id, + execution_timeout=execution_timeout, + run_timeout=run_timeout, + task_timeout=task_timeout, + id_reuse_policy=id_reuse_policy, + retry_policy=retry_policy, + cron_schedule=cron_schedule, + memo=memo, + search_attributes=search_attributes, + header=header, + start_signal=start_signal, + start_signal_args=start_signal_args, + ) + ).result() + + def get_workflow_handle( + self, workflow_id: str, run_id: Optional[str] = None + ) -> "WorkflowHandle[Any]": + return WorkflowHandle(self, workflow_id, run_id=run_id) + + +T = TypeVar("T") + + +class WorkflowHandle(Generic[T]): + def __init__( + self, client: Client, id: str, *, run_id: Optional[str] = None + ) -> None: + self._client = client + self._id = id + self._run_id = run_id + + @property + def id(self) -> str: + return self._id + + @property + def run_id(self) -> Optional[str]: + return self._run_id + + async def result(self, *, follow_runs: bool = True) -> T: + req = temporalio.api.workflowservice.v1.GetWorkflowExecutionHistoryRequest( + namespace=self._client.namespace, + execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=self._id, run_id=self._run_id or "" + ), + wait_new_event=True, + history_event_filter_type=temporalio.api.enums.v1.HistoryEventFilterType.HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT, + skip_archival=True, + ) + while True: + resp = await self._client.service.get_workflow_execution_history(req) + # Continually ask for pages until we get close + if len(resp.history.events) == 0: + req.next_page_token = resp.next_page_token + continue + elif len(resp.history.events) != 1: + raise RuntimeError( + f"Expected single close event, got {len(resp.history.events)}" + ) + event = resp.history.events[0] + if event.HasField("workflow_execution_completed_event_attributes"): + complete_attr = event.workflow_execution_completed_event_attributes + # Follow execution + if follow_runs and complete_attr.new_execution_run_id: + req.execution.run_id = 
complete_attr.new_execution_run_id + req.next_page_token = b"" + continue + # Ignoring anything after the first response like TypeScript + if not complete_attr.result: + return cast(T, None) + results = await self._client.data_converter.decode( + complete_attr.result.payloads + ) + if not results: + return cast(T, None) + elif len(results) > 1: + logger.warning("Expected single result, got %s", len(results)) + return cast(T, results[0]) + elif event.HasField("workflow_execution_failed_event_attributes"): + fail_attr = event.workflow_execution_failed_event_attributes + # Follow execution + if follow_runs and fail_attr.new_execution_run_id: + req.execution.run_id = fail_attr.new_execution_run_id + req.next_page_token = b"" + continue + raise WorkflowFailureError( + cause=await temporalio.failure.FailureError.from_proto( + fail_attr.failure, self._client.data_converter + ) + ) + elif event.HasField("workflow_execution_canceled_event_attributes"): + cancel_attr = event.workflow_execution_canceled_event_attributes + details = [] + if cancel_attr.details and cancel_attr.details.payloads: + details = await self._client.data_converter.decode( + cancel_attr.details.payloads + ) + raise WorkflowFailureError( + cause=temporalio.failure.FailureError( + "Workflow cancelled", + temporalio.failure.CancelledFailure(*details), + ) + ) + elif event.HasField("workflow_execution_terminated_event_attributes"): + term_attr = event.workflow_execution_terminated_event_attributes + details = [] + if term_attr.details and term_attr.details.payloads: + details = await self._client.data_converter.decode( + term_attr.details.payloads + ) + raise WorkflowFailureError( + cause=temporalio.failure.FailureError( + term_attr.reason if term_attr.reason else "Workflow terminated", + temporalio.failure.TerminatedFailure( + *details, + reason=term_attr.reason if term_attr.reason else None, + ), + ) + ) + elif event.HasField("workflow_execution_timed_out_event_attributes"): + time_attr = 
event.workflow_execution_timed_out_event_attributes + # Follow execution + if follow_runs and time_attr.new_execution_run_id: + req.execution.run_id = time_attr.new_execution_run_id + req.next_page_token = b"" + continue + raise WorkflowFailureError( + cause=temporalio.failure.FailureError( + "Workflow timed out", + temporalio.failure.TimeoutFailure( + temporalio.failure.TimeoutType.START_TO_CLOSE + ), + ) + ) + elif event.HasField("workflow_execution_continued_as_new_event_attributes"): + cont_attr = event.workflow_execution_continued_as_new_event_attributes + if not cont_attr.new_execution_run_id: + raise RuntimeError( + "Unexpectedly missing new run ID from continue as new" + ) + # Follow execution + if follow_runs: + req.execution.run_id = cont_attr.new_execution_run_id + req.next_page_token = b"" + continue + raise WorkflowContinuedAsNewError(cont_attr.new_execution_run_id) + else: + raise RuntimeError( + f"Unexpected close event attribute of {event.WhichOneof('attributes')}" + ) + + async def cancel(self) -> None: + await self._client._impl.cancel_workflow( + CancelWorkflowInput(id=self._id, run_id=self._run_id) + ) + + # TODO(cretz): Wrap the result in Python-friendlier type? 
+ async def describe( + self, + ) -> temporalio.api.workflowservice.v1.DescribeWorkflowExecutionResponse: + return await self._client.service.describe_workflow_execution( + temporalio.api.workflowservice.v1.DescribeWorkflowExecutionRequest( + namespace=self._client.namespace, + execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=self._id, + run_id=self._run_id if self._run_id else "", + ), + ) + ) + + async def query(self, name: str, *args: Any) -> Any: + return await self._client._impl.query_workflow( + QueryWorkflowInput( + id=self._id, + run_id=self._run_id, + query=name, + args=args, + reject_condition=self._client._workflow_query_reject_condition, + ) + ) + + async def signal(self, name: str, *args: Any) -> None: + await self._client._impl.signal_workflow( + SignalWorkflowInput( + id=self._id, + run_id=self._run_id, + signal=name, + args=args, + ) + ) + + async def terminate(self, *args: Any, reason: Optional[str] = None) -> None: + await self._client._impl.terminate_workflow( + TerminateWorkflowInput( + id=self._id, + run_id=self._run_id, + args=args, + reason=reason, + ) + ) + + +@dataclass +class StartWorkflowInput: + workflow: str + args: Iterable[Any] + id: str + task_queue: str + execution_timeout: Optional[timedelta] + run_timeout: Optional[timedelta] + task_timeout: Optional[timedelta] + id_reuse_policy: WorkflowIDReusePolicy + retry_policy: Optional[temporalio.RetryPolicy] + cron_schedule: str + memo: Optional[Mapping[str, Any]] + search_attributes: Optional[Mapping[str, Any]] + header: Optional[Mapping[str, Any]] + start_signal: Optional[str] + start_signal_args: Iterable[Any] + + +@dataclass +class CancelWorkflowInput: + id: str + run_id: Optional[str] + + +@dataclass +class QueryWorkflowInput: + id: str + run_id: Optional[str] + query: str + args: Iterable[Any] + reject_condition: Optional[WorkflowQueryRejectCondition] + + +@dataclass +class SignalWorkflowInput: + id: str + run_id: Optional[str] + signal: str + args: Iterable[Any] + 
+ +@dataclass +class TerminateWorkflowInput: + id: str + run_id: Optional[str] + args: Iterable[Any] + reason: Optional[str] + + +class Interceptor: + def intercept_client(self, next: "OutboundInterceptor") -> "OutboundInterceptor": + return next + + +class OutboundInterceptor: + def __init__(self, next: "OutboundInterceptor") -> None: + self.next = next + + async def start_workflow(self, input: StartWorkflowInput) -> WorkflowHandle[Any]: + return await self.next.start_workflow(input) + + async def cancel_workflow(self, input: CancelWorkflowInput) -> None: + await self.next.cancel_workflow(input) + + async def query_workflow(self, input: QueryWorkflowInput) -> Any: + return await self.next.query_workflow(input) + + async def signal_workflow(self, input: SignalWorkflowInput) -> None: + await self.next.signal_workflow(input) + + async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: + await self.next.terminate_workflow(input) + + +class _ClientImpl(OutboundInterceptor): + def __init__(self, client: Client) -> None: + # We are intentionally not calling the base class's __init__ here + self._client = client + + async def start_workflow(self, input: StartWorkflowInput) -> WorkflowHandle[Any]: + # Build request + req: Union[ + temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest, + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionRequest, + ] + if input.start_signal is not None: + req = temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionRequest( + signal_name=input.start_signal + ) + if input.start_signal_args: + req.signal_input.payloads.extend( + await self._client.data_converter.encode(input.start_signal_args) + ) + else: + req = temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest() + req.namespace = self._client.namespace + req.workflow_id = input.id + req.workflow_type.name = input.workflow + req.task_queue.name = input.task_queue + if input.args: + req.input.payloads.extend( + await 
self._client.data_converter.encode(input.args) + ) + if input.execution_timeout is not None: + req.workflow_execution_timeout.FromTimedelta(input.execution_timeout) + if input.run_timeout is not None: + req.workflow_run_timeout.FromTimedelta(input.run_timeout) + if input.task_timeout is not None: + req.workflow_task_timeout.FromTimedelta(input.task_timeout) + req.identity = self._client.identity + req.request_id = str(uuid.uuid4()) + req.workflow_id_reuse_policy = cast( + "temporalio.api.enums.v1.WorkflowIdReusePolicy.ValueType", + int(input.id_reuse_policy), + ) + if input.retry_policy is not None: + input.retry_policy.apply_to_proto(req.retry_policy) + req.cron_schedule = input.cron_schedule + if input.memo is not None: + for k, v in input.memo.items(): + req.memo.fields[k] = (await self._client.data_converter.encode([v]))[0] + if input.search_attributes is not None: + for k, v in input.search_attributes.items(): + req.search_attributes.indexed_fields[k] = ( + await self._client.data_converter.encode([v]) + )[0] + if input.header is not None: + for k, v in input.header.items(): + req.header.fields[k] = (await self._client.data_converter.encode([v]))[ + 0 + ] + + # Start with signal or just normal start + resp: Union[ + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionResponse, + temporalio.api.workflowservice.v1.StartWorkflowExecutionResponse, + ] + if isinstance( + req, + temporalio.api.workflowservice.v1.SignalWithStartWorkflowExecutionRequest, + ): + resp = await self._client.service.signal_with_start_workflow_execution( + req, retry=True + ) + else: + resp = await self._client.service.start_workflow_execution(req, retry=True) + return WorkflowHandle(self._client, req.workflow_id, run_id=resp.run_id) + + async def cancel_workflow(self, input: CancelWorkflowInput) -> None: + await self._client.service.request_cancel_workflow_execution( + temporalio.api.workflowservice.v1.RequestCancelWorkflowExecutionRequest( + namespace=self._client.namespace, 
+ workflow_execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id if input.run_id else "", + ), + identity=self._client.identity, + request_id=str(uuid.uuid4()), + ), + retry=True, + ) + + async def query_workflow(self, input: QueryWorkflowInput) -> Any: + req = temporalio.api.workflowservice.v1.QueryWorkflowRequest( + namespace=self._client.namespace, + execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id if input.run_id else "", + ) + # TODO(cretz): Headers here and elsewhere + ) + if input.reject_condition: + req.query_reject_condition = cast( + "temporalio.api.enums.v1.QueryRejectCondition.ValueType", + int(input.reject_condition), + ) + req.query.query_type = input.query + if input.args: + req.query.query_args.payloads.extend( + await self._client.data_converter.encode(input.args) + ) + # TODO(cretz): Wrap error + resp = await self._client.service.query_workflow(req, retry=True) + if resp.HasField("query_rejected"): + raise WorkflowQueryRejectedError( + WorkflowExecutionStatus(resp.query_rejected.status) + if resp.query_rejected.status + else None + ) + if not resp.query_result.payloads: + return None + results = await self._client.data_converter.decode(resp.query_result.payloads) + if not results: + return None + elif len(results) > 1: + logger.warning("Expected single query result, got %s", len(results)) + return results[0] + + async def signal_workflow(self, input: SignalWorkflowInput) -> None: + req = temporalio.api.workflowservice.v1.SignalWorkflowExecutionRequest( + namespace=self._client.namespace, + workflow_execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id if input.run_id else "", + ), + signal_name=input.signal, + identity=self._client.identity, + request_id=str(uuid.uuid4()), + # TODO(cretz): Headers here and elsewhere + ) + if input.args: + req.input.payloads.extend( + await 
self._client.data_converter.encode(input.args) + ) + await self._client.service.signal_workflow_execution(req, retry=True) + + async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: + req = temporalio.api.workflowservice.v1.TerminateWorkflowExecutionRequest( + namespace=self._client.namespace, + workflow_execution=temporalio.api.common.v1.WorkflowExecution( + workflow_id=input.id, + run_id=input.run_id if input.run_id else "", + ), + reason=input.reason if input.reason else "", + identity=self._client.identity, + ) + if input.args: + req.details.payloads.extend( + await self._client.data_converter.encode(input.args) + ) + await self._client.service.terminate_workflow_execution(req, retry=True) + + +class WorkflowFailureError(Exception): + def __init__(self, *, cause: temporalio.failure.FailureError) -> None: + super().__init__("Workflow execution failed") + # TODO(cretz): Confirm setting this __cause__ is acceptable + self.__cause__ = cause + + +class WorkflowContinuedAsNewError(Exception): + def __init__(self, new_execution_run_id: str) -> None: + super().__init__("Workflow continued as new") + self._new_execution_run_id = new_execution_run_id + + +class WorkflowQueryRejectedError(Exception): + def __init__(self, status: Optional[WorkflowExecutionStatus]) -> None: + super().__init__(f"Query rejected, status: {status}") + self._status = status + + @property + def status(self) -> Optional[WorkflowExecutionStatus]: + return self._status diff --git a/temporalio/client/__init__.py b/temporalio/client/__init__.py deleted file mode 100644 index 8437f65c9..000000000 --- a/temporalio/client/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from .workflow_service import ( - WorkflowService, - WorkflowServiceCall, - WorkflowServiceRequest, - WorkflowServiceResponse, -) diff --git a/temporalio/client/client.py b/temporalio/client/client.py deleted file mode 100644 index 1adb6a102..000000000 --- a/temporalio/client/client.py +++ /dev/null @@ -1,122 +0,0 @@ - -from 
datetime import timedelta -from enum import Enum -import os -import socket -from typing import Any, Awaitable, Generic, Optional, TypeVar, Union, overload -import uuid -from temporalio.api.enums.v1.workflow_pb2 import WorkflowIdReusePolicy -import temporalio.client -import temporalio.api.common.v1 -import temporalio.api.enums.v1 -import temporalio.api.taskqueue.v1 -import temporalio.api.workflowservice.v1 -import temporalio.converter -import temporalio.util.proto -import temporalio - -class WorkflowIDReusePolicy(Enum): - ALLOW_DUPLICATE = temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE - ALLOW_DUPLICATE_FAILED_ONLY = temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY - REJECT_DUPLICATE = temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE - -class Client: - - @staticmethod - async def connect(target_url: str, *, namespace: str="default", data_converter: temporalio.converter.DataConverter = temporalio.converter.default()) -> "Client": - return Client(await temporalio.client.WorkflowService.connect(target_url), namespace=namespace, data_converter=data_converter) - - def __init__(self, service: temporalio.client.WorkflowService, *, namespace: str="default", data_converter: temporalio.converter.DataConverter = temporalio.converter.default()): - self._service = service - self._namespace = namespace - self._data_converter = data_converter - - @property - def service(self) -> temporalio.client.WorkflowService: - return self._service - - @property - def namespace(self) -> str: - return self._namespace - - @property - def data_converter(self) -> temporalio.converter.DataConverter: - return self._data_converter - - async def start_workflow( - self, - workflow: str, - *args: Any, - task_queue: str, - # TODO(cretz): Should we require this? 
- id: str = str(uuid.uuid4()), - execution_timeout: Optional[timedelta] = None, - run_timeout: Optional[timedelta] = None, - task_timeout: Optional[timedelta] = None, - identity: str = f"{os.getpid()}@{socket.gethostname()}", - id_reuse_policy: WorkflowIdReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, - retry_policy: Optional[temporalio.RetryPolicy] = None, - cron_schedule: Optional[str] = None - # TODO(cretz): Signal with start - # start_signal: Optional[str] = None, - # start_signal_args: list[Any] = [], - # ... additional options omitted for brevity - ) -> temporalio.WorkflowHandle[Any]: - input = None - if len(args) > 0: - input = temporalio.api.common.v1.Payloads(payloads = await self._data_converter.encode(list(args))) - proto_retry_policy = None - if retry_policy is not None: - proto_retry_policy = retry_policy.to_proto() - req = temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest( - namespace=self._namespace, - workflow_id=id, - workflow_type=temporalio.api.common.v1.WorkflowType(name=workflow), - task_queue=temporalio.api.taskqueue.v1.TaskQueue(name=task_queue), - input=input, - workflow_execution_timeout=temporalio.util.proto.from_timedelta(execution_timeout), - workflow_run_timeout=temporalio.util.proto.from_timedelta(run_timeout), - workflow_task_timeout=temporalio.util.proto.from_timedelta(task_timeout), - identity=identity, - request_id=str(uuid.uuid4()), - workflow_id_reuse_policy = id_reuse_policy, - retry_policy = None if retry_policy is None else retry_policy.to_proto(), - cron_schedule: typing.Text = ..., - memo: typing.Optional[temporal.api.common.v1.message_pb2.Memo] = ..., - search_attributes: typing.Optional[temporal.api.common.v1.message_pb2.SearchAttributes] = ..., - header: typing.Optional[temporal.api.common.v1.message_pb2.Header] = ..., - ) - # TODO(cretz): The rest - pass - -T = TypeVar("T") - -class WorkflowHandle(Generic[T]): - def __init__( - self, - client: Client, - id: str, - *, - run_id: Optional[str] = None - ) 
-> None: - self._client = client - self._id = id - self._run_id = run_id - - async def result(self) -> T: - pass - - async def cancel(self): - pass - - async def describe(self) -> temporalio.WorkflowExecution: - pass - - async def query(self, name: str, *args: Any) -> Any: - pass - - async def signal(self, name: str, *args: Any): - pass - - async def terminate(self, *, reason: Optional[str] = None): - pass \ No newline at end of file diff --git a/temporalio/common/retry_policy.py b/temporalio/common/retry_policy.py index 8fd936444..7c4b1829d 100644 --- a/temporalio/common/retry_policy.py +++ b/temporalio/common/retry_policy.py @@ -14,5 +14,5 @@ class RetryPolicy: max_elapsed_time: Optional[timedelta] max_retries: int - def to_proto(self) -> temporalio.api.common.v1.RetryPolicy: - raise NotImplementedError \ No newline at end of file + def apply_to_proto(self, proto: temporalio.api.common.v1.RetryPolicy) -> None: + raise NotImplementedError diff --git a/temporalio/converter.py b/temporalio/converter.py index 1154fdf5f..a7f161789 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -4,7 +4,7 @@ import inspect import json from abc import ABC, abstractmethod -from typing import Any, List, Mapping, Optional, Type +from typing import Any, Iterable, List, Mapping, Optional, Type import google.protobuf.json_format import google.protobuf.message @@ -17,7 +17,9 @@ class DataConverter(ABC): """Base converter to/from multiple payloads/values.""" @abstractmethod - async def encode(self, values: List[Any]) -> List[temporalio.api.common.v1.Payload]: + async def encode( + self, values: Iterable[Any] + ) -> List[temporalio.api.common.v1.Payload]: """Encode values into payloads. 
Args: @@ -35,8 +37,8 @@ async def encode(self, values: List[Any]) -> List[temporalio.api.common.v1.Paylo @abstractmethod async def decode( self, - payloads: List[temporalio.api.common.v1.Payload], - type_hints: Optional[List[Type]], + payloads: Iterable[temporalio.api.common.v1.Payload], + type_hints: Optional[List[Type]] = None, ) -> List[Any]: """Decode payloads into values. @@ -128,7 +130,9 @@ def __init__(self, *converters: PayloadConverter) -> None: # Insertion order preserved here self.converters = {c.encoding.encode(): c for c in converters} - async def encode(self, values: List[Any]) -> List[temporalio.api.common.v1.Payload]: + async def encode( + self, values: Iterable[Any] + ) -> List[temporalio.api.common.v1.Payload]: """Encode values trying each converter. See base class. Always returns the same number of payloads as values. @@ -154,8 +158,8 @@ async def encode(self, values: List[Any]) -> List[temporalio.api.common.v1.Paylo async def decode( self, - payloads: List[temporalio.api.common.v1.Payload], - type_hints: Optional[List[Type]], + payloads: Iterable[temporalio.api.common.v1.Payload], + type_hints: Optional[List[Type]] = None, ) -> List[Any]: """Decode values trying each converter. 
diff --git a/temporalio/failure.py b/temporalio/failure.py new file mode 100644 index 000000000..7f2ffe4d3 --- /dev/null +++ b/temporalio/failure.py @@ -0,0 +1,61 @@ +from enum import IntEnum +from typing import Any, List, Optional + +import temporalio.api.enums.v1 +import temporalio.api.failure.v1 +import temporalio.converter + + +class FailureError(Exception): + @staticmethod + async def from_proto( + failure: temporalio.api.failure.v1.Failure, + data_converter: temporalio.converter.DataConverter, + ) -> "FailureError": + raise NotImplementedError + + def __init__(self, message: str, failure: "Failure") -> None: + super().__init__(message) + raise NotImplementedError + + +class Failure: + def __init__( + self, + *details: Any, + proto_failure: Optional[temporalio.api.failure.v1.Failure] = None + ) -> None: + raise NotImplementedError + + +class CancelledFailure(Failure): + def __init__(self, *details: Any) -> None: + super().__init__(*details) + raise NotImplementedError + + +class TerminatedFailure(Failure): + def __init__(self, *details: Any, reason: Optional[str]) -> None: + super().__init__(*details) + raise NotImplementedError + + +class TimeoutType(IntEnum): + START_TO_CLOSE = int( + temporalio.api.enums.v1.TimeoutType.TIMEOUT_TYPE_START_TO_CLOSE + ) + SCHEDULE_TO_START = int( + temporalio.api.enums.v1.TimeoutType.TIMEOUT_TYPE_SCHEDULE_TO_START + ) + SCHEDULE_TO_CLOSE = int( + temporalio.api.enums.v1.TimeoutType.TIMEOUT_TYPE_SCHEDULE_TO_CLOSE + ) + HEARTBEAT = int(temporalio.api.enums.v1.TimeoutType.TIMEOUT_TYPE_HEARTBEAT) + + +class TimeoutFailure(Failure): + def __init__( + self, type: TimeoutType, last_heartbeat_details: Optional[List[Any]] = None + ) -> None: + super().__init__() + raise NotImplementedError diff --git a/temporalio/util/__init__.py b/temporalio/util/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/temporalio/util/proto.py b/temporalio/util/proto.py deleted file mode 100644 index ef4f1ca6b..000000000 --- 
a/temporalio/util/proto.py +++ /dev/null @@ -1,12 +0,0 @@ - -from datetime import timedelta -from typing import Optional -import google.protobuf.duration_pb2 - - -def optional_timedelta_to_duration(d: Optional[timedelta]) -> Optional[google.protobuf.duration_pb2.Duration]: - if d is None: - return None - ret = google.protobuf.duration_pb2.Duration() - ret.FromTimedelta(d) - return ret \ No newline at end of file diff --git a/temporalio/client/workflow_service.py b/temporalio/workflow_service.py similarity index 77% rename from temporalio/client/workflow_service.py rename to temporalio/workflow_service.py index 19b2f9ea3..72d9bdf17 100644 --- a/temporalio/client/workflow_service.py +++ b/temporalio/workflow_service.py @@ -24,192 +24,197 @@ def __init__(self) -> None: wsv1 = temporalio.api.workflowservice.v1 - self.count_workflow_executions = self.__new_call( + self.count_workflow_executions = self._new_call( "count_workflow_executions", wsv1.CountWorkflowExecutionsRequest, wsv1.CountWorkflowExecutionsResponse, ) - self.deprecate_namespace = self.__new_call( + self.deprecate_namespace = self._new_call( "deprecate_namespace", wsv1.DeprecateNamespaceRequest, wsv1.DeprecateNamespaceResponse, ) - self.describe_namespace = self.__new_call( + self.describe_namespace = self._new_call( "describe_namespace", wsv1.DescribeNamespaceRequest, wsv1.DescribeNamespaceResponse, ) - self.describe_task_queue = self.__new_call( + self.describe_task_queue = self._new_call( "describe_task_queue", wsv1.DescribeTaskQueueRequest, wsv1.DescribeTaskQueueResponse, ) - self.describe_workflow_execution = self.__new_call( + self.describe_workflow_execution = self._new_call( "describe_workflow_execution", wsv1.DescribeWorkflowExecutionRequest, wsv1.DescribeWorkflowExecutionResponse, ) - self.get_cluster_info = self.__new_call( + self.get_cluster_info = self._new_call( "get_cluster_info", wsv1.GetClusterInfoRequest, wsv1.GetClusterInfoResponse, ) - self.get_search_attributes = self.__new_call( + 
self.get_search_attributes = self._new_call( "get_search_attributes", wsv1.GetSearchAttributesRequest, wsv1.GetSearchAttributesResponse, ) - self.get_workflow_execution_history = self.__new_call( + self.get_workflow_execution_history = self._new_call( "get_workflow_execution_history", wsv1.GetWorkflowExecutionHistoryRequest, wsv1.GetWorkflowExecutionHistoryResponse, ) - self.list_archived_workflow_executions = self.__new_call( + self.list_archived_workflow_executions = self._new_call( "list_archived_workflow_executions", wsv1.ListArchivedWorkflowExecutionsRequest, wsv1.ListArchivedWorkflowExecutionsResponse, ) - self.list_closed_workflow_executions = self.__new_call( + self.list_closed_workflow_executions = self._new_call( "list_closed_workflow_executions", wsv1.ListClosedWorkflowExecutionsRequest, wsv1.ListClosedWorkflowExecutionsResponse, ) - self.list_namespaces = self.__new_call( + self.list_namespaces = self._new_call( "list_namespaces", wsv1.ListNamespacesRequest, wsv1.ListNamespacesResponse, ) - self.list_open_workflow_executions = self.__new_call( + self.list_open_workflow_executions = self._new_call( "list_open_workflow_executions", wsv1.ListOpenWorkflowExecutionsRequest, wsv1.ListOpenWorkflowExecutionsResponse, ) - self.list_task_queue_partitions = self.__new_call( + self.list_task_queue_partitions = self._new_call( "list_task_queue_partitions", wsv1.ListTaskQueuePartitionsRequest, wsv1.ListTaskQueuePartitionsResponse, ) - self.list_workflow_executions = self.__new_call( + self.list_workflow_executions = self._new_call( "list_workflow_executions", wsv1.ListWorkflowExecutionsRequest, wsv1.ListWorkflowExecutionsResponse, ) - self.poll_activity_task_queue = self.__new_call( + self.poll_activity_task_queue = self._new_call( "poll_activity_task_queue", wsv1.PollActivityTaskQueueRequest, wsv1.PollActivityTaskQueueResponse, ) - self.poll_workflow_task_queue = self.__new_call( + self.poll_workflow_task_queue = self._new_call( "poll_workflow_task_queue", 
wsv1.PollWorkflowTaskQueueRequest, wsv1.PollWorkflowTaskQueueResponse, ) - self.query_workflow = self.__new_call( + self.query_workflow = self._new_call( "query_workflow", wsv1.QueryWorkflowRequest, wsv1.QueryWorkflowResponse, ) - self.record_activity_task_heartbeat = self.__new_call( + self.record_activity_task_heartbeat = self._new_call( "record_activity_task_heartbeat", wsv1.RecordActivityTaskHeartbeatRequest, wsv1.RecordActivityTaskHeartbeatResponse, ) - self.record_activity_task_heartbeat_by_id = self.__new_call( + self.record_activity_task_heartbeat_by_id = self._new_call( "record_activity_task_heartbeat_by_id", wsv1.RecordActivityTaskHeartbeatByIdRequest, wsv1.RecordActivityTaskHeartbeatByIdResponse, ) - self.register_namespace = self.__new_call( + self.register_namespace = self._new_call( "register_namespace", wsv1.RegisterNamespaceRequest, wsv1.RegisterNamespaceResponse, ) - self.request_cancel_workflow_execution = self.__new_call( + self.request_cancel_workflow_execution = self._new_call( "request_cancel_workflow_execution", wsv1.RequestCancelWorkflowExecutionRequest, wsv1.RequestCancelWorkflowExecutionResponse, ) - self.reset_sticky_task_queue = self.__new_call( + self.reset_sticky_task_queue = self._new_call( "reset_sticky_task_queue", wsv1.ResetStickyTaskQueueRequest, wsv1.ResetStickyTaskQueueResponse, ) - self.reset_workflow_execution = self.__new_call( + self.reset_workflow_execution = self._new_call( "reset_workflow_execution", wsv1.ResetWorkflowExecutionRequest, wsv1.ResetWorkflowExecutionResponse, ) - self.respond_activity_task_canceled = self.__new_call( + self.respond_activity_task_canceled = self._new_call( "respond_activity_task_canceled", wsv1.RespondActivityTaskCanceledRequest, wsv1.RespondActivityTaskCanceledResponse, ) - self.respond_activity_task_canceled_by_id = self.__new_call( + self.respond_activity_task_canceled_by_id = self._new_call( "respond_activity_task_canceled_by_id", wsv1.RespondActivityTaskCanceledByIdRequest, 
wsv1.RespondActivityTaskCanceledByIdResponse, ) - self.respond_activity_task_completed = self.__new_call( + self.respond_activity_task_completed = self._new_call( "respond_activity_task_completed", wsv1.RespondActivityTaskCompletedRequest, wsv1.RespondActivityTaskCompletedResponse, ) - self.respond_activity_task_completed_by_id = self.__new_call( + self.respond_activity_task_completed_by_id = self._new_call( "respond_activity_task_completed_by_id", wsv1.RespondActivityTaskCompletedByIdRequest, wsv1.RespondActivityTaskCompletedByIdResponse, ) - self.respond_activity_task_failed = self.__new_call( + self.respond_activity_task_failed = self._new_call( "respond_activity_task_failed", wsv1.RespondActivityTaskFailedRequest, wsv1.RespondActivityTaskFailedResponse, ) - self.respond_activity_task_failed_by_id = self.__new_call( + self.respond_activity_task_failed_by_id = self._new_call( "respond_activity_task_failed_by_id", wsv1.RespondActivityTaskFailedByIdRequest, wsv1.RespondActivityTaskFailedByIdResponse, ) - self.respond_query_task_completed = self.__new_call( + self.respond_query_task_completed = self._new_call( "respond_query_task_completed", wsv1.RespondQueryTaskCompletedRequest, wsv1.RespondQueryTaskCompletedResponse, ) - self.respond_workflow_task_completed = self.__new_call( + self.respond_workflow_task_completed = self._new_call( "respond_workflow_task_completed", wsv1.RespondWorkflowTaskCompletedRequest, wsv1.RespondWorkflowTaskCompletedResponse, ) - self.respond_workflow_task_failed = self.__new_call( + self.respond_workflow_task_failed = self._new_call( "respond_workflow_task_failed", wsv1.RespondWorkflowTaskFailedRequest, wsv1.RespondWorkflowTaskFailedResponse, ) - self.scan_workflow_executions = self.__new_call( + self.scan_workflow_executions = self._new_call( "scan_workflow_executions", wsv1.ScanWorkflowExecutionsRequest, wsv1.ScanWorkflowExecutionsResponse, ) - self.signal_with_start_workflow_execution = self.__new_call( + 
self.signal_with_start_workflow_execution = self._new_call( "signal_with_start_workflow_execution", wsv1.SignalWithStartWorkflowExecutionRequest, wsv1.SignalWithStartWorkflowExecutionResponse, ) - self.signal_workflow_execution = self.__new_call( + self.signal_workflow_execution = self._new_call( "signal_workflow_execution", wsv1.SignalWorkflowExecutionRequest, wsv1.SignalWorkflowExecutionResponse, ) - self.terminate_workflow_execution = self.__new_call( + self.start_workflow_execution = self._new_call( + "start_workflow_execution", + wsv1.StartWorkflowExecutionRequest, + wsv1.StartWorkflowExecutionResponse, + ) + self.terminate_workflow_execution = self._new_call( "terminate_workflow_execution", wsv1.TerminateWorkflowExecutionRequest, wsv1.TerminateWorkflowExecutionResponse, ) - self.reset_workflow_execution = self.__new_call( + self.reset_workflow_execution = self._new_call( "reset_workflow_execution", wsv1.ResetWorkflowExecutionRequest, wsv1.ResetWorkflowExecutionResponse, ) - self.update_namespace = self.__new_call( + self.update_namespace = self._new_call( "update_namespace", wsv1.UpdateNamespaceRequest, wsv1.UpdateNamespaceResponse, @@ -226,7 +231,7 @@ async def _rpc_call( ) -> WorkflowServiceResponse: raise NotImplementedError - def __new_call( + def _new_call( self, name: str, req_type: Type[WorkflowServiceRequest], diff --git a/tests/api/test_grpc_stub.py b/tests/api/test_grpc_stub.py index 767e88ca7..2601cc887 100644 --- a/tests/api/test_grpc_stub.py +++ b/tests/api/test_grpc_stub.py @@ -7,7 +7,7 @@ class SimpleServer(temporalio.api.workflowservice.v1.WorkflowServiceServicer): - async def CountWorkflowExecutions( + async def CountWorkflowExecutions( # type: ignore # https://github.com/nipunn1313/mypy-protobuf/issues/216 self, request: temporalio.api.workflowservice.v1.CountWorkflowExecutionsRequest, context: grpc.aio.ServicerContext, diff --git a/tests/client/__init__.py b/tests/client/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff 
--git a/tests/client/test_workflow_service.py b/tests/client/test_workflow_service.py deleted file mode 100644 index 704241cfa..000000000 --- a/tests/client/test_workflow_service.py +++ /dev/null @@ -1,26 +0,0 @@ -import uuid - -import temporalio.api.common.v1 -import temporalio.api.taskqueue.v1 -import temporalio.api.workflowservice.v1 -import temporalio.client -import temporalio.converter - - -async def test_simple(): - service = await temporalio.client.WorkflowService.connect("http://localhost:7233") - task_queue = f"my-task-queue-{uuid.uuid4()}" - workflow_id = f"my-workflow-{uuid.uuid4()}" - resp = await service.start_workflow_execution( - temporalio.api.workflowservice.v1.StartWorkflowExecutionRequest( - namespace="default", - workflow_id=workflow_id, - workflow_type=temporalio.api.common.v1.WorkflowType(name="my-workflow"), - task_queue=temporalio.api.taskqueue.v1.TaskQueue(name=task_queue), - input=temporalio.api.common.v1.Payloads( - payloads=await temporalio.converter.default().encode(["some string!"]) - ), - request_id=str(uuid.uuid4()), - ) - ) - print(f"Started workflow with run ID: {resp.run_id}") diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 000000000..9f950c1b6 --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,15 @@ +import uuid + +import temporalio.client + + +async def test_client_simple(): + client = await temporalio.client.Client.connect("http://localhost:7233") + handle = await client.start_workflow( + "my-workflow", + "arg1", + id=f"my-workflow-id-{uuid.uuid4}", + task_queue=f"my-workflow-id-{uuid.uuid4}", + ) + assert handle.run_id + print(f"Workflow created with run ID: {handle.run_id}") diff --git a/tests/converter_test.py b/tests/test_converter.py similarity index 100% rename from tests/converter_test.py rename to tests/test_converter.py From 9975139b7413a0ca26b3625258e79562ee4ca7db Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Fri, 4 Feb 2022 08:02:02 -0600 Subject: [PATCH 14/15] Update to latest 
sdk-core --- .gitmodules | 2 +- temporalio/bridge/sdk-core | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitmodules b/.gitmodules index d1c4f4d05..80965358f 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ [submodule "sdk-core"] path = temporalio/bridge/sdk-core - url = git@github.com:Sushisource/sdk-core.git + url = git@github.com:temporalio/sdk-core.git diff --git a/temporalio/bridge/sdk-core b/temporalio/bridge/sdk-core index 0fa511302..9db7b0544 160000 --- a/temporalio/bridge/sdk-core +++ b/temporalio/bridge/sdk-core @@ -1 +1 @@ -Subproject commit 0fa511302a814d9e70ccd3b1ce0c17a72a3cfb53 +Subproject commit 9db7b0544b301469dcedfffcdab86f602dae2a04 From 4be798b7ec4e9e01409a9eb74f1637e93e39e312 Mon Sep 17 00:00:00 2001 From: Chad Retz Date: Fri, 4 Feb 2022 17:02:01 -0600 Subject: [PATCH 15/15] Client work and docs scaffolding --- .gitignore | 4 +- docs/api.rst | 18 ++ docs/conf.py | 69 +++++ docs/direct_api.rst | 69 +++++ docs/index.rst | 23 ++ poetry.lock | 463 ++++++++++++++++++++++++++++++++- pyproject.toml | 13 +- temporalio/__init__.py | 1 - temporalio/bridge/src/lib.rs | 144 +++++++--- temporalio/client.py | 335 ++++++++++++++++++++++-- temporalio/common/__init__.py | 2 + temporalio/converter.py | 5 +- temporalio/workflow_service.py | 5 - 13 files changed, 1088 insertions(+), 63 deletions(-) create mode 100644 docs/api.rst create mode 100644 docs/conf.py create mode 100644 docs/direct_api.rst create mode 100644 docs/index.rst delete mode 100644 temporalio/__init__.py diff --git a/.gitignore b/.gitignore index 88e10b9ae..26d3d4997 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,9 @@ .venv +__pycache__ +/docs/_build temporalio/api/* !temporalio/api/__init__.py temporalio/bridge/proto/* !temporalio/bridge/proto/__init__.py temporalio/bridge/target/ -__pycache__ + diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 000000000..6d6322537 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,18 @@ +.. 
_api: + +API +=== + +.. module:: temporalio + +Client +------ + +.. automodule:: temporalio.client + :members: + +Converters +---------- + +.. automodule:: temporalio.converter + :members: diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 000000000..cf3bf98e2 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,69 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys + +sys.path.insert(0, os.path.abspath("../")) + + +# -- Project information ----------------------------------------------------- + +project = "Temporal Python SDK" +copyright = "2022, Temporal Technologies Inc" +author = "Temporal Technologies Inc" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.napoleon", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. 
See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +intersphinx_mapping = { + "python": ("https://docs.python.org/3/", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + +autodoc_docstring_signature = True + +autodoc_typehints = "description" + +autodoc_typehints_description_target = "documented" + +autodoc_preserve_defaults = True diff --git a/docs/direct_api.rst b/docs/direct_api.rst new file mode 100644 index 000000000..73d605647 --- /dev/null +++ b/docs/direct_api.rst @@ -0,0 +1,69 @@ +.. _direct_api: + +Direct gRPC API +=============== + +Raw gRPC Client +--------------- + +.. automodule:: temporalio.workflow_service + :members: + +Temporal API Objects +-------------------- + +.. automodule:: temporalio.api.common.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.command.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.enums.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.errordetails.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.failure.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.filter.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.history.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.namespace.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.query.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.replication.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.taskqueue.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.version.v1 + :members: + :imported-members: + +.. 
automodule:: temporalio.api.workflow.v1 + :members: + :imported-members: + +.. automodule:: temporalio.api.workflowservice.v1 + :members: + :imported-members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 000000000..7aed7c49c --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,23 @@ +.. Temporal Python SDK documentation master file, created by + sphinx-quickstart on Fri Feb 4 11:52:42 2022. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Temporal Python SDK +=================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + api + direct_api + + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/poetry.lock b/poetry.lock index b88635d32..83fa43598 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,11 @@ +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "atomicwrites" version = "1.4.0" @@ -20,6 +28,17 @@ docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +[[package]] +name = "babel" +version = "2.9.1" +description = "Internationalization utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[package.dependencies] +pytz = ">=2015.7" + [[package]] name = "black" version = "21.12b0" @@ -47,6 +66,25 @@ jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "certifi" +version = "2021.10.8" 
+description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "charset-normalizer" +version = "2.0.11" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + [[package]] name = "click" version = "8.0.3" @@ -67,6 +105,14 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + [[package]] name = "grpcio" version = "1.43.0" @@ -93,6 +139,22 @@ python-versions = ">=3.6" grpcio = ">=1.43.0" protobuf = ">=3.5.0.post1,<4.0dev" +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.3.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + [[package]] name = "importlib-metadata" version = "4.10.1" @@ -132,6 +194,28 @@ requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] plugins = ["setuptools"] +[[package]] +name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" +optional = false +python-versions = ">=3.6" + [[package]] name = "maturin" version = "0.12.6" @@ -227,6 +311,17 @@ importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pockets" +version = "0.9.1" +description = "A collection of helpful Python tools!" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = ">=1.5.2" + [[package]] name = "protobuf" version = "3.19.4" @@ -243,6 +338,28 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pygments" +version = "2.11.2" +description = "Pygments is a syntax highlighting package written in Python." +category = "dev" +optional = false +python-versions = ">=3.5" + [[package]] name = "pyparsing" version = "3.0.7" @@ -291,6 +408,32 @@ typing-extensions = {version = ">=4.0", markers = "python_version < \"3.8\""} [package.extras] testing = ["coverage (==6.2)", "hypothesis (>=5.7.1)", "flaky (>=3.5.0)", "mypy (==0.931)"] +[[package]] +name = "pytz" +version = "2021.3" +description = "World timezone definitions, modern and historical" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + [[package]] name = "six" version = "1.16.0" @@ -299,6 +442,144 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sphinx" +version = "4.4.0" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] + +[[package]] +name = "sphinx-rtd-theme" +version = "1.0.0" +description = "Read the Docs theme for Sphinx" +category = "dev" +optional = false +python-versions = 
">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" + +[package.dependencies] +docutils = "<0.18" +sphinx = ">=1.6" + +[package.extras] +dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest", "html5lib"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["pytest", "flake8", "mypy"] + +[[package]] +name = "sphinxcontrib-napoleon" +version = "0.7" +description = "Sphinx \"napoleon\" extension." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pockets = ">=0.3" +six = ">=1.5.2" + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + [[package]] name = "toml" version = "0.10.2" @@ -350,6 +631,19 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "urllib3" +version = "1.26.8" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + [[package]] name = "zipp" version = "3.7.0" @@ -365,9 +659,13 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = "^3.7" -content-hash = "6f01485d25e3ac67af3d750d5995d2d38ad074270853130194bf98d1c97f5cbd" +content-hash = "088af139ce74f15f34950abcf0f6d5a1333572d062bbcb23004580808ec18de0" [metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = 
"sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -376,10 +674,22 @@ attrs = [ {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, ] +babel = [ + {file = "Babel-2.9.1-py2.py3-none-any.whl", hash = "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9"}, + {file = "Babel-2.9.1.tar.gz", hash = "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0"}, +] black = [ {file = "black-21.12b0-py3-none-any.whl", hash = "sha256:a615e69ae185e08fdd73e4715e260e2479c861b5740057fde6e8b4e3b7dd589f"}, {file = "black-21.12b0.tar.gz", hash = "sha256:77b80f693a569e2e527958459634f18df9b0ba2625ba4e0c2d5da5be42e6f2b3"}, ] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, + {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, +] click = [ {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, @@ -388,6 +698,10 @@ colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +docutils = [ + {file = 
"docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] grpcio = [ {file = "grpcio-1.43.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:a4e786a8ee8b30b25d70ee52cda6d1dbba2a8ca2f1208d8e20ed8280774f15c8"}, {file = "grpcio-1.43.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:af9c3742f6c13575c0d4147a8454da0ff5308c4d9469462ff18402c6416942fe"}, @@ -480,6 +794,14 @@ grpcio-tools = [ {file = "grpcio_tools-1.43.0-cp39-cp39-win32.whl", hash = "sha256:ebfb94ddb454a6dc3a505d9531dc81c948e6364e181b8795bfad3f3f479974dc"}, {file = "grpcio_tools-1.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:d21928b680e6e29538688cffbf53f3d5a53cff0ec8f0c33139641700045bdf1a"}, ] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +imagesize = [ + {file = "imagesize-1.3.0-py2.py3-none-any.whl", hash = "sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c"}, + {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, +] importlib-metadata = [ {file = "importlib_metadata-4.10.1-py3-none-any.whl", hash = "sha256:899e2a40a8c4a1aec681feef45733de8a6c58f3f6a0dbed2eb6574b4387a77b6"}, {file = "importlib_metadata-4.10.1.tar.gz", hash = "sha256:951f0d8a5b7260e9db5e41d429285b5f451e928479f19d80818878527d36e95e"}, @@ -492,6 +814,81 @@ isort = [ {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] +jinja2 = [ + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = 
"sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, +] +markupsafe = [ + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = 
"MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] maturin = [ {file = "maturin-0.12.6-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7c36e8ee53fb6f544d8f1b1b480035bf204806494be5aa44394a278b5cf9b522"}, {file = "maturin-0.12.6-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:90d6ed47fa51d902e0afba0b469652f76e1b0e799af2910712c5dec3d1343003"}, @@ -557,6 +954,10 @@ pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] +pockets = [ + {file = "pockets-0.9.1-py2.py3-none-any.whl", hash = "sha256:68597934193c08a08eb2bf6a1d85593f627c22f9b065cc727a4f03f669d96d86"}, + {file = "pockets-0.9.1.tar.gz", hash = "sha256:9320f1a3c6f7a9133fe3b571f283bcf3353cd70249025ae8d618e40e9f7e92b3"}, +] 
protobuf = [ {file = "protobuf-3.19.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f51d5a9f137f7a2cec2d326a74b6e3fc79d635d69ffe1b036d39fc7d75430d37"}, {file = "protobuf-3.19.4-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:09297b7972da685ce269ec52af761743714996b4381c085205914c41fcab59fb"}, @@ -589,6 +990,14 @@ py = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] +pydocstyle = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] +pygments = [ + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, +] pyparsing = [ {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, @@ -601,10 +1010,58 @@ pytest-asyncio = [ {file = "pytest-asyncio-0.17.2.tar.gz", hash = "sha256:6d895b02432c028e6957d25fc936494e78c6305736e785d9fee408b1efbc7ff4"}, {file = "pytest_asyncio-0.17.2-py3-none-any.whl", hash = "sha256:e0fe5dbea40516b661ef1bcfe0bd9461c2847c4ef4bb40012324f2454fb7d56d"}, ] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = 
"sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +sphinx = [ + {file = "Sphinx-4.4.0-py3-none-any.whl", hash = "sha256:5da895959511473857b6d0200f56865ed62c31e8f82dd338063b84ec022701fe"}, + {file = "Sphinx-4.4.0.tar.gz", hash = "sha256:6caad9786055cb1fa22b4a365c1775816b876f91966481765d7d50e9f0dd35cc"}, +] +sphinx-rtd-theme = [ + {file = "sphinx_rtd_theme-1.0.0-py2.py3-none-any.whl", hash = "sha256:4d35a56f4508cfee4c4fb604373ede6feae2a306731d533f409ef5c3496fdbd8"}, + {file = "sphinx_rtd_theme-1.0.0.tar.gz", hash = "sha256:eec6d497e4c2195fa0e8b2016b337532b8a699a68bcb22a512870e16925c6a5c"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = 
"sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-napoleon = [ + {file = "sphinxcontrib-napoleon-0.7.tar.gz", hash = "sha256:407382beed396e9f2d7f3043fad6afda95719204a1e1a231ac865f40abcbfcf8"}, + {file = "sphinxcontrib_napoleon-0.7-py2.py3-none-any.whl", hash = "sha256:711e41a3974bdf110a484aec4c1a556799eb0b3f3b897521a018ad7e2db13fef"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -651,6 +1108,10 @@ typing-extensions = [ {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, {file = "typing_extensions-4.0.1.tar.gz", hash = 
"sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, ] +urllib3 = [ + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, +] zipp = [ {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, diff --git a/pyproject.toml b/pyproject.toml index 6facdd052..269539543 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,15 +24,21 @@ isort = "^5.10.1" maturin = "^0.12.6" mypy = "^0.931" mypy-protobuf = "^3.2.0" +pydocstyle = "^6.1.1" pytest = "^6.2.5" pytest-asyncio = "^0.17.2" +Sphinx = "^4.4.0" +sphinx-rtd-theme = "^1.0.0" +sphinxcontrib-napoleon = "^0.7" [tool.poe.tasks] build = ["gen-protos", "build-bridge"] build-bridge = "python scripts/build-bridge.py" format = [{cmd = "black ."}, {cmd = "isort ."}] +gen-docs = "sphinx-build docs docs/_build" gen-protos = "python scripts/gen-protos.py" -lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}, "lint-types"] +lint = [{cmd = "black --check ."}, {cmd = "isort --check-only ."}, "lint-types"] # TODO(cretz): Add lint-docs +lint-docs = "pydocstyle" lint-types = "mypy ." 
test = "pytest" @@ -51,6 +57,11 @@ exclude = [ 'temporalio/bridge/proto', ] +[tool.pydocstyle] +convention = "google" +# https://github.com/PyCQA/pydocstyle/issues/363#issuecomment-625563088 +match_dir = "^(?!(docs|scripts|tests|api|proto|\\.)).*" + [build-system] build-backend = "poetry.core.masonry.api" requires = ["poetry-core>=1.0.0"] diff --git a/temporalio/__init__.py b/temporalio/__init__.py deleted file mode 100644 index f4c2c89ad..000000000 --- a/temporalio/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .common import RetryPolicy diff --git a/temporalio/bridge/src/lib.rs b/temporalio/bridge/src/lib.rs index a064232da..3b775c171 100644 --- a/temporalio/bridge/src/lib.rs +++ b/temporalio/bridge/src/lib.rs @@ -2,6 +2,7 @@ use pyo3::exceptions::{PyRuntimeError, PyValueError}; use pyo3::prelude::*; use std::collections::HashMap; use std::time::Duration; +use temporal_client::WorkflowService; use tonic; #[pymodule] @@ -11,9 +12,13 @@ fn temporal_sdk_bridge(_py: Python, m: &PyModule) -> PyResult<()> { Ok(()) } +type Client = temporal_client::RetryGateway< + temporal_client::ConfiguredClient, +>; + #[pyclass] pub struct ClientRef { - retry_client: std::sync::Arc>, + retry_client: Client, } #[derive(FromPyObject)] @@ -52,9 +57,9 @@ fn new_client(py: Python, opts: ClientOptions) -> PyResult<&PyAny> { let opts: temporal_client::ServerGatewayOptions = opts.try_into()?; pyo3_asyncio::tokio::future_into_py(py, async move { Ok(ClientRef { - retry_client: std::sync::Arc::new(opts.connect(None).await.map_err(|err| { + retry_client: opts.connect_no_namespace(None).await.map_err(|err| { PyRuntimeError::new_err(format!("Failed client connect: {}", err)) - })?), + })?, }) }) } @@ -68,15 +73,119 @@ impl ClientRef { retry: bool, req: Vec, ) -> PyResult<&'p PyAny> { - let retry_client = self.retry_client.clone(); + let mut retry_client = self.retry_client.clone(); pyo3_asyncio::tokio::future_into_py(py, async move { let bytes = match rpc.as_str() { + "count_workflow_executions" => 
{ + rpc_call!(retry_client, retry, count_workflow_executions, req) + } + "deprecate_namespace" => rpc_call!(retry_client, retry, deprecate_namespace, req), + "describe_namespace" => rpc_call!(retry_client, retry, describe_namespace, req), + "describe_task_queue" => rpc_call!(retry_client, retry, describe_task_queue, req), + "describe_workflow_execution" => { + rpc_call!(retry_client, retry, describe_workflow_execution, req) + } + "get_cluster_info" => rpc_call!(retry_client, retry, get_cluster_info, req), + "get_search_attributes" => { + rpc_call!(retry_client, retry, get_search_attributes, req) + } "get_workflow_execution_history" => { rpc_call!(retry_client, retry, get_workflow_execution_history, req) } + "list_archived_workflow_executions" => { + rpc_call!(retry_client, retry, list_archived_workflow_executions, req) + } + "list_closed_workflow_executions" => { + rpc_call!(retry_client, retry, list_closed_workflow_executions, req) + } + "list_namespaces" => rpc_call!(retry_client, retry, list_namespaces, req), + "list_open_workflow_executions" => { + rpc_call!(retry_client, retry, list_open_workflow_executions, req) + } + "list_task_queue_partitions" => { + rpc_call!(retry_client, retry, list_task_queue_partitions, req) + } + "list_workflow_executions" => { + rpc_call!(retry_client, retry, list_workflow_executions, req) + } + "poll_activity_task_queue" => { + rpc_call!(retry_client, retry, poll_activity_task_queue, req) + } + "poll_workflow_task_queue" => { + rpc_call!(retry_client, retry, poll_workflow_task_queue, req) + } + "query_workflow" => rpc_call!(retry_client, retry, query_workflow, req), + "record_activity_task_heartbeat" => { + rpc_call!(retry_client, retry, record_activity_task_heartbeat, req) + } + "record_activity_task_heartbeat_by_id" => rpc_call!( + retry_client, + retry, + record_activity_task_heartbeat_by_id, + req + ), + "register_namespace" => rpc_call!(retry_client, retry, register_namespace, req), + "request_cancel_workflow_execution" => { + 
rpc_call!(retry_client, retry, request_cancel_workflow_execution, req) + } + "reset_sticky_task_queue" => { + rpc_call!(retry_client, retry, reset_sticky_task_queue, req) + } + "reset_workflow_execution" => { + rpc_call!(retry_client, retry, reset_workflow_execution, req) + } + "respond_activity_task_canceled" => { + rpc_call!(retry_client, retry, respond_activity_task_canceled, req) + } + "respond_activity_task_canceled_by_id" => rpc_call!( + retry_client, + retry, + respond_activity_task_canceled_by_id, + req + ), + "respond_activity_task_completed" => { + rpc_call!(retry_client, retry, respond_activity_task_completed, req) + } + "respond_activity_task_completed_by_id" => rpc_call!( + retry_client, + retry, + respond_activity_task_completed_by_id, + req + ), + "respond_activity_task_failed" => { + rpc_call!(retry_client, retry, respond_activity_task_failed, req) + } + "respond_activity_task_failed_by_id" => { + rpc_call!(retry_client, retry, respond_activity_task_failed_by_id, req) + } + "respond_query_task_completed" => { + rpc_call!(retry_client, retry, respond_query_task_completed, req) + } + "respond_workflow_task_completed" => { + rpc_call!(retry_client, retry, respond_workflow_task_completed, req) + } + "respond_workflow_task_failed" => { + rpc_call!(retry_client, retry, respond_workflow_task_failed, req) + } + "scan_workflow_executions" => { + rpc_call!(retry_client, retry, scan_workflow_executions, req) + } + "signal_with_start_workflow_execution" => rpc_call!( + retry_client, + retry, + signal_with_start_workflow_execution, + req + ), + "signal_workflow_execution" => { + rpc_call!(retry_client, retry, signal_workflow_execution, req) + } "start_workflow_execution" => { rpc_call!(retry_client, retry, start_workflow_execution, req) } + "terminate_workflow_execution" => { + rpc_call!(retry_client, retry, terminate_workflow_execution, req) + } + "update_namespace" => rpc_call!(retry_client, retry, update_namespace, req), _ => return 
Err(PyValueError::new_err(format!("Unknown RPC call {}", rpc))), }?; let bytes: &[u8] = &bytes; @@ -107,34 +216,13 @@ where } } -fn clone_tonic_req(req: &tonic::Request) -> tonic::Request { - tonic::Request::new(req.get_ref().clone()) -} - #[macro_export] macro_rules! rpc_call { ($retry_client:ident, $retry:ident, $call_name:ident, $req:ident) => { if $retry { - // TODO(cretz): I wouldn't have to clone this if call_with_retry - // allowed error types other than tonic statuses - let req = rpc_req($req)?; - let fact = || { - let req = clone_tonic_req(&req); - let mut raw_client = $retry_client.get_client().raw_client().clone(); - async move { raw_client.$call_name(req).await } - }; - rpc_resp( - $retry_client - .call_with_retry( - fact, - temporal_client::CallType::Normal, - stringify!($call_name), - ) - .await, - ) + rpc_resp($retry_client.$call_name(rpc_req($req)?).await) } else { - let mut raw_client = $retry_client.get_client().raw_client().clone(); - rpc_resp(raw_client.$call_name(rpc_req($req)?).await) + rpc_resp($retry_client.into_inner().$call_name(rpc_req($req)?).await) } }; } @@ -149,8 +237,6 @@ impl TryFrom for temporal_client::ServerGatewayOptions { url::Url::parse(&opts.target_url) .map_err(|err| PyValueError::new_err(format!("invalid target URL: {}", err)))?, ) - // TODO(cretz): Unneeded - .namespace("".to_string()) .client_name(opts.client_name) .client_version(opts.client_version) .static_headers(opts.static_headers) diff --git a/temporalio/client.py b/temporalio/client.py index 77bdead7f..068f328b7 100644 --- a/temporalio/client.py +++ b/temporalio/client.py @@ -1,3 +1,5 @@ +"""Client for accessing Temporal.""" + import logging import os import socket @@ -7,13 +9,13 @@ from enum import IntEnum from typing import Any, Generic, Iterable, Mapping, Optional, TypeVar, Union, cast -import temporalio import temporalio.api.common.v1 import temporalio.api.enums.v1 import temporalio.api.failure.v1 import temporalio.api.history.v1 import 
temporalio.api.taskqueue.v1 import temporalio.api.workflowservice.v1 +import temporalio.common import temporalio.converter import temporalio.failure import temporalio.workflow_service @@ -22,52 +24,98 @@ class WorkflowIDReusePolicy(IntEnum): + """How already-in-use workflow IDs are handled on start. + + See :py:class:`temporalio.api.enums.v1.WorkflowIdReusePolicy`. + """ + ALLOW_DUPLICATE = int( temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE`.""" + ALLOW_DUPLICATE_FAILED_ONLY = int( temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_ALLOW_DUPLICATE_FAILED_ONLY`.""" + REJECT_DUPLICATE = int( temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowIdReusePolicy.WORKFLOW_ID_REUSE_POLICY_REJECT_DUPLICATE`.""" class WorkflowQueryRejectCondition(IntEnum): + """Whether a query should be rejected in certain conditions. + + See :py:class:`temporalio.api.enums.v1.QueryRejectCondition`. + """ + NONE = int(temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NONE) + """See :py:attr:`temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NONE`.""" + NOT_OPEN = int( temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_OPEN ) + """See :py:attr:`temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_OPEN`.""" + NOT_COMPLETED_CLEANLY = int( temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_COMPLETED_CLEANLY ) + """See :py:attr:`temporalio.api.enums.v1.QueryRejectCondition.QUERY_REJECT_CONDITION_NOT_COMPLETED_CLEANLY`.""" class WorkflowExecutionStatus(IntEnum): + """Status of a workflow execution. 
+ + See :py:class:`temporalio.api.enums.v1.WorkflowExecutionStatus`. + """ + RUNNING = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_RUNNING`.""" + COMPLETED = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_COMPLETED`.""" + FAILED = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_FAILED`.""" + CANCELED = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CANCELED ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CANCELED`.""" + TERMINATED = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TERMINATED ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TERMINATED`.""" + CONTINUED_AS_NEW = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_CONTINUED_AS_NEW`.""" + TIMED_OUT = int( temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TIMED_OUT ) + """See :py:attr:`temporalio.api.enums.v1.WorkflowExecutionStatus.WORKFLOW_EXECUTION_STATUS_TIMED_OUT`.""" class Client: + """Client for accessing Temporal. + + Most users will use :py:meth:`connect` to create a client. The + :py:attr:`service` property provides access to a raw gRPC client. To create + another client, like for a different namespace, :py:func:`Client` may be + directly instantiated with a :py:attr:`service` of another. 
+ """ + @staticmethod async def connect( target_url: str, @@ -78,6 +126,20 @@ async def connect( interceptors: Iterable["Interceptor"] = [], workflow_query_reject_condition: Optional[WorkflowQueryRejectCondition] = None, ) -> "Client": + """Connect to a Temporal server. + + Args: + target_url: URL for the Temporal server. For local development, this + is often "http://localhost:7233". + namespace: Namespace to use for client calls. + identity: Identity to use for client calls. + data_converter: Data converter to use for all data conversions + to/from payloads. + interceptors: Set of interceptors that are chained together to allow + intercepting of client calls. The earlier interceptors wrap the + later ones. + workflow_query_reject_condition: When to reject a query. + """ return Client( await temporalio.workflow_service.WorkflowService.connect(target_url), namespace=namespace, @@ -97,6 +159,10 @@ def __init__( interceptors: Iterable["Interceptor"] = [], workflow_query_reject_condition: Optional[WorkflowQueryRejectCondition] = None, ): + """Create a Temporal client from a workflow service. + + See :py:meth:`connect` for details on the parameters. 
+ """ self._service = service self._namespace = namespace self._identity = identity @@ -111,18 +177,22 @@ def __init__( @property def service(self) -> temporalio.workflow_service.WorkflowService: + """Raw gRPC service for this client.""" return self._service @property def namespace(self) -> str: + """Namespace used in calls by this client.""" return self._namespace @property def identity(self) -> str: + """Identity used in calls by this client.""" return self._identity @property def data_converter(self) -> temporalio.converter.DataConverter: + """Data converter used by this client.""" return self._data_converter async def start_workflow( @@ -135,7 +205,7 @@ async def start_workflow( run_timeout: Optional[timedelta] = None, task_timeout: Optional[timedelta] = None, id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, - retry_policy: Optional[temporalio.RetryPolicy] = None, + retry_policy: Optional[temporalio.common.RetryPolicy] = None, cron_schedule: str = "", memo: Optional[Mapping[str, Any]] = None, search_attributes: Optional[Mapping[str, Any]] = None, @@ -143,6 +213,33 @@ async def start_workflow( start_signal: Optional[str] = None, start_signal_args: Iterable[Any] = [], ) -> "WorkflowHandle[Any]": + """Start a workflow and return its handle. + + Args: + workflow: Name of the workflow to start. + args: Arguments for the workflow if any. + id: Unique identifier for the workflow execution. + task_queue: Task queue to run the workflow on. + execution_timeout: Total workflow execution timeout including + retries and continue as new. + run_timeout: Timeout of a single workflow run. + task_timeout: Timeout of a single workflow task. + id_reuse_policy: How already-existing IDs are treated. + retry_policy: Retry policy for the workflow. + cron_schedule: See https://docs.temporal.io/docs/content/what-is-a-temporal-cron-job/ + memo: Memo for the workflow. + search_attributes: Search attributes for the workflow. + header: Header for the workflow. 
+ start_signal: If present, this signal is sent as signal-with-start + instead of traditional workflow start. + start_signal_args: Arguments for start_signal if start_signal + present. + + Returns: + A workflow handle to the started/existing workflow. + :py:attr:`WorkflowHandle.run_id` will be populated with the current + run ID. + """ return await self._impl.start_workflow( StartWorkflowInput( workflow=workflow, @@ -173,7 +270,7 @@ async def execute_workflow( run_timeout: Optional[timedelta] = None, task_timeout: Optional[timedelta] = None, id_reuse_policy: WorkflowIDReusePolicy = WorkflowIDReusePolicy.ALLOW_DUPLICATE, - retry_policy: Optional[temporalio.RetryPolicy] = None, + retry_policy: Optional[temporalio.common.RetryPolicy] = None, cron_schedule: str = "", memo: Optional[Mapping[str, Any]] = None, search_attributes: Optional[Mapping[str, Any]] = None, @@ -181,6 +278,11 @@ async def execute_workflow( start_signal: Optional[str] = None, start_signal_args: list[Any] = [], ) -> Any: + """Start a workflow and wait for completion. + + This is a shortcut for :py:meth:`start_workflow` + + :py:meth:`WorkflowHandle.result`. + """ return await ( await self.start_workflow( workflow, @@ -202,8 +304,14 @@ async def execute_workflow( ).result() def get_workflow_handle( - self, workflow_id: str, run_id: Optional[str] = None + self, workflow_id: str, *, run_id: Optional[str] = None ) -> "WorkflowHandle[Any]": + """Get a workflow handle to an existing workflow by its ID. + + Args: + workflow_id: Workflow ID to get a handle to. + run_id: Run ID that will be used for all calls. + """ return WorkflowHandle(self, workflow_id, run_id=run_id) @@ -211,26 +319,58 @@ def get_workflow_handle( class WorkflowHandle(Generic[T]): + """Handle for interacting with a workflow. + + This is usually created via :py:meth:`Client.get_workflow_handle` or + returned from :py:meth:`Client.start_workflow`/:py:meth:`Client.execute_workflow`. 
+ """ + + SELF_RUN_ID = "__" + def __init__( self, client: Client, id: str, *, run_id: Optional[str] = None ) -> None: + """Create workflow handle.""" self._client = client self._id = id self._run_id = run_id @property def id(self) -> str: + """ID for the workflow.""" return self._id @property def run_id(self) -> Optional[str]: + """Run ID used for calls on this handle if present.""" return self._run_id - async def result(self, *, follow_runs: bool = True) -> T: + async def result( + self, *, starting_run_id: Optional[str] = SELF_RUN_ID, follow_runs: bool = True + ) -> T: + """Wait for result of the workflow. + + Args: + starting_run_id: Run ID to fetch result for. Defaults to using + :py:meth:`run_id`. If set to None or there is no + :py:meth:`run_id`, this will get the latest result for the + workflow ID. + follow_runs: If true (default), workflow runs will be continually + fetched, until the most recent one is found. If false, the first + result is used. + + Returns: + Result of the workflow after being converted by the data converter. + + Raises: + Exception: Any failure of the workflow. 
+ """ + if starting_run_id == WorkflowHandle.SELF_RUN_ID: + starting_run_id = self._run_id req = temporalio.api.workflowservice.v1.GetWorkflowExecutionHistoryRequest( namespace=self._client.namespace, execution=temporalio.api.common.v1.WorkflowExecution( - workflow_id=self._id, run_id=self._run_id or "" + workflow_id=self._id, run_id=starting_run_id or "" ), wait_new_event=True, history_event_filter_type=temporalio.api.enums.v1.HistoryEventFilterType.HISTORY_EVENT_FILTER_TYPE_CLOSE_EVENT, @@ -299,10 +439,10 @@ async def result(self, *, follow_runs: bool = True) -> T: ) raise WorkflowFailureError( cause=temporalio.failure.FailureError( - term_attr.reason if term_attr.reason else "Workflow terminated", + term_attr.reason or "Workflow terminated", temporalio.failure.TerminatedFailure( *details, - reason=term_attr.reason if term_attr.reason else None, + reason=term_attr.reason or None, ), ) ) @@ -338,59 +478,156 @@ async def result(self, *, follow_runs: bool = True) -> T: f"Unexpected close event attribute of {event.WhichOneof('attributes')}" ) - async def cancel(self) -> None: + async def cancel( + self, + *, + run_id: Optional[str] = SELF_RUN_ID, + first_execution_run_id: Optional[None], + ) -> None: + """Cancel the workflow. + + Args: + run_id: Run ID to cancel. Defaults to using :py:meth:`run_id`. If + set to None or there is no :py:meth:`run_id`, this will cancel + the latest run for the workflow ID. + first_execution_run_id: First run ID that started the workflow. If + set, the cancellation makes sure that the workflow was started + with the given run ID. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id await self._client._impl.cancel_workflow( - CancelWorkflowInput(id=self._id, run_id=self._run_id) + CancelWorkflowInput( + id=self._id, + run_id=run_id, + first_execution_run_id=first_execution_run_id, + ) ) # TODO(cretz): Wrap the result in Python-friendlier type? 
async def describe( self, + *, + run_id: Optional[str] = SELF_RUN_ID, ) -> temporalio.api.workflowservice.v1.DescribeWorkflowExecutionResponse: + """Get workflow details. + + Args: + run_id: Run ID to describe. Defaults to using :py:meth:`run_id`. If + set to None or there is no :py:meth:`run_id`, this will describe + the latest run for the workflow ID. + + Returns: + Workflow details. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id return await self._client.service.describe_workflow_execution( temporalio.api.workflowservice.v1.DescribeWorkflowExecutionRequest( namespace=self._client.namespace, execution=temporalio.api.common.v1.WorkflowExecution( workflow_id=self._id, - run_id=self._run_id if self._run_id else "", + run_id=run_id or "", ), ) ) - async def query(self, name: str, *args: Any) -> Any: + async def query( + self, name: str, *args: Any, run_id: Optional[str] = SELF_RUN_ID + ) -> Any: + """Query the workflow. + + Args: + name: Query name on the workflow. + args: Query arguments. + run_id: Run ID to query. Defaults to using :py:meth:`run_id`. If set + to None or there is no :py:meth:`run_id`, this will query the + latest run for the workflow ID. + + Returns: + Result of the query. + + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id return await self._client._impl.query_workflow( QueryWorkflowInput( id=self._id, - run_id=self._run_id, + run_id=run_id, query=name, args=args, reject_condition=self._client._workflow_query_reject_condition, ) ) - async def signal(self, name: str, *args: Any) -> None: + async def signal( + self, name: str, *args: Any, run_id: Optional[str] = SELF_RUN_ID + ) -> None: + """Send a signal to the workflow. + + Args: + name: Signal name on the workflow. + args: Signal arguments. + run_id: Run ID to signal. Defaults to using :py:meth:`run_id`. 
If
+                set to None or there is no :py:meth:`run_id`, this will signal
+                the latest run for the workflow ID.
+
+        TODO(cretz): Raises
+        """
+        if run_id == WorkflowHandle.SELF_RUN_ID:
+            run_id = self._run_id
         await self._client._impl.signal_workflow(
             SignalWorkflowInput(
                 id=self._id,
-                run_id=self._run_id,
+                run_id=run_id,
                 signal=name,
                 args=args,
             )
         )
 
-    async def terminate(self, *args: Any, reason: Optional[str] = None) -> None:
+    async def terminate(
+        self,
+        *args: Any,
+        reason: Optional[str] = None,
+        run_id: Optional[str] = SELF_RUN_ID,
+        first_execution_run_id: Optional[str] = None,
+    ) -> None:
+        """Terminate the workflow.
+
+        Args:
+            args: Details to store on the termination.
+            reason: Reason for the termination.
+            run_id: Run ID to terminate. Defaults to using :py:meth:`run_id`. If
+                set to None or there is no :py:meth:`run_id`, this will
+                terminate the latest run for the workflow ID.
+            first_execution_run_id: First run ID that started the workflow. If
+                set, the termination makes sure that the workflow was started
+                with the given run ID.
+ + TODO(cretz): Raises + """ + if run_id == WorkflowHandle.SELF_RUN_ID: + run_id = self._run_id await self._client._impl.terminate_workflow( TerminateWorkflowInput( id=self._id, - run_id=self._run_id, + run_id=run_id, args=args, reason=reason, + first_execution_run_id=first_execution_run_id, ) ) @dataclass class StartWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.start_workflow`.""" + workflow: str args: Iterable[Any] id: str @@ -399,7 +636,7 @@ class StartWorkflowInput: run_timeout: Optional[timedelta] task_timeout: Optional[timedelta] id_reuse_policy: WorkflowIDReusePolicy - retry_policy: Optional[temporalio.RetryPolicy] + retry_policy: Optional[temporalio.common.RetryPolicy] cron_schedule: str memo: Optional[Mapping[str, Any]] search_attributes: Optional[Mapping[str, Any]] @@ -410,12 +647,17 @@ class StartWorkflowInput: @dataclass class CancelWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.cancel_workflow`.""" + id: str run_id: Optional[str] + first_execution_run_id: Optional[str] @dataclass class QueryWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.query_workflow`.""" + id: str run_id: Optional[str] query: str @@ -425,6 +667,8 @@ class QueryWorkflowInput: @dataclass class SignalWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.signal_workflow`.""" + id: str run_id: Optional[str] signal: str @@ -433,34 +677,67 @@ class SignalWorkflowInput: @dataclass class TerminateWorkflowInput: + """Input for :py:meth:`OutboundInterceptor.terminate_workflow`.""" + id: str run_id: Optional[str] + first_execution_run_id: Optional[str] args: Iterable[Any] reason: Optional[str] class Interceptor: + """Interceptor for clients. + + This should be extended by any client interceptors. + """ + def intercept_client(self, next: "OutboundInterceptor") -> "OutboundInterceptor": + """Method called for intercepting a client. + + Args: + next: The underlying outbound interceptor this interceptor should + delegate to. 
+ + Returns: + The new interceptor that will be called for each client call. + """ return next class OutboundInterceptor: + """OutboundInterceptor for intercepting client calls. + + This should be extended by any client outbound interceptors. + """ + def __init__(self, next: "OutboundInterceptor") -> None: + """Create the outbound interceptor. + + Args: + next: The next interceptor in the chain. The default implementation + of all calls is to delegate to the next interceptor. + """ self.next = next async def start_workflow(self, input: StartWorkflowInput) -> WorkflowHandle[Any]: + """Called for every :py:meth:`Client.start_workflow` call.""" return await self.next.start_workflow(input) async def cancel_workflow(self, input: CancelWorkflowInput) -> None: + """Called for every :py:meth:`WorkflowHandle.cancel` call.""" await self.next.cancel_workflow(input) async def query_workflow(self, input: QueryWorkflowInput) -> Any: + """Called for every :py:meth:`WorkflowHandle.query` call.""" return await self.next.query_workflow(input) async def signal_workflow(self, input: SignalWorkflowInput) -> None: + """Called for every :py:meth:`WorkflowHandle.signal` call.""" await self.next.signal_workflow(input) async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: + """Called for every :py:meth:`WorkflowHandle.terminate` call.""" await self.next.terminate_workflow(input) @@ -544,10 +821,11 @@ async def cancel_workflow(self, input: CancelWorkflowInput) -> None: namespace=self._client.namespace, workflow_execution=temporalio.api.common.v1.WorkflowExecution( workflow_id=input.id, - run_id=input.run_id if input.run_id else "", + run_id=input.run_id or "", ), identity=self._client.identity, request_id=str(uuid.uuid4()), + first_execution_run_id=input.first_execution_run_id or "", ), retry=True, ) @@ -557,7 +835,7 @@ async def query_workflow(self, input: QueryWorkflowInput) -> Any: namespace=self._client.namespace, execution=temporalio.api.common.v1.WorkflowExecution( 
workflow_id=input.id, - run_id=input.run_id if input.run_id else "", + run_id=input.run_id or "", ) # TODO(cretz): Headers here and elsewhere ) @@ -593,7 +871,7 @@ async def signal_workflow(self, input: SignalWorkflowInput) -> None: namespace=self._client.namespace, workflow_execution=temporalio.api.common.v1.WorkflowExecution( workflow_id=input.id, - run_id=input.run_id if input.run_id else "", + run_id=input.run_id or "", ), signal_name=input.signal, identity=self._client.identity, @@ -611,10 +889,11 @@ async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: namespace=self._client.namespace, workflow_execution=temporalio.api.common.v1.WorkflowExecution( workflow_id=input.id, - run_id=input.run_id if input.run_id else "", + run_id=input.run_id or "", ), - reason=input.reason if input.reason else "", + reason=input.reason or "", identity=self._client.identity, + first_execution_run_id=input.first_execution_run_id or "", ) if input.args: req.details.payloads.extend( @@ -624,23 +903,33 @@ async def terminate_workflow(self, input: TerminateWorkflowInput) -> None: class WorkflowFailureError(Exception): + """Error that occurs when a workflow is unsuccessful.""" + def __init__(self, *, cause: temporalio.failure.FailureError) -> None: + """Create workflow failure error.""" super().__init__("Workflow execution failed") # TODO(cretz): Confirm setting this __cause__ is acceptable self.__cause__ = cause class WorkflowContinuedAsNewError(Exception): + """Error that occurs when a workflow was continued as new.""" + def __init__(self, new_execution_run_id: str) -> None: + """Create workflow continue as new error.""" super().__init__("Workflow continued as new") self._new_execution_run_id = new_execution_run_id class WorkflowQueryRejectedError(Exception): + """Error that occurs when a query was rejected.""" + def __init__(self, status: Optional[WorkflowExecutionStatus]) -> None: + """Create workflow query rejected error.""" super().__init__(f"Query rejected, 
status: {status}") self._status = status @property def status(self) -> Optional[WorkflowExecutionStatus]: + """Get workflow execution status causing rejection.""" return self._status diff --git a/temporalio/common/__init__.py b/temporalio/common/__init__.py index 34150a0e5..e1445662f 100644 --- a/temporalio/common/__init__.py +++ b/temporalio/common/__init__.py @@ -1 +1,3 @@ +"""Common utilities.""" + from .retry_policy import RetryPolicy diff --git a/temporalio/converter.py b/temporalio/converter.py index a7f161789..6f4bb2ba0 100644 --- a/temporalio/converter.py +++ b/temporalio/converter.py @@ -49,7 +49,7 @@ async def decode( present, it must have the exact same length as payloads even if the values are just "object". - Return: + Returns: Collection of Python values. Note, this does not have to be the same number as values given, but at least one must be present. @@ -208,6 +208,7 @@ async def decode( payload: temporalio.api.common.v1.Payload, type_hint: Optional[Type] = None, ) -> Any: + """See base class.""" if len(payload.data) > 0: raise RuntimeError("Expected empty data set for binary/null") return None @@ -332,7 +333,7 @@ class JSONPlainPayloadConverter(PayloadConverter): This supports all values that :py:func:`json.dump` supports and also adds encoding support for :py:mod:`dataclasses` by converting them using - :py:func:`dataclasses.asdict`. Note that on decode, if there is a type hint, + :py:mod:`dataclasses.asdict`. Note that on decode, if there is a type hint, it will be used to construct the data class. 
""" diff --git a/temporalio/workflow_service.py b/temporalio/workflow_service.py index 72d9bdf17..968cb3ce6 100644 --- a/temporalio/workflow_service.py +++ b/temporalio/workflow_service.py @@ -209,11 +209,6 @@ def __init__(self) -> None: wsv1.TerminateWorkflowExecutionRequest, wsv1.TerminateWorkflowExecutionResponse, ) - self.reset_workflow_execution = self._new_call( - "reset_workflow_execution", - wsv1.ResetWorkflowExecutionRequest, - wsv1.ResetWorkflowExecutionResponse, - ) self.update_namespace = self._new_call( "update_namespace", wsv1.UpdateNamespaceRequest,