From 0c6d375d217d707a3540bf72717da743317114d4 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Tue, 15 Sep 2020 12:38:42 -0600 Subject: [PATCH 01/11] implement stateless function support... --- Description.md | 235 ---- LICENSE | 201 --- README.md | 28 - cloudstate/__init__.py | 10 - cloudstate/cloudstate.py | 77 -- cloudstate/contexts.py | 54 - cloudstate/discovery_servicer.py | 65 - cloudstate/entity_key_pb2.py | 44 - cloudstate/entity_key_pb2_grpc.py | 3 - cloudstate/entity_pb2.py | 733 ----------- cloudstate/entity_pb2_grpc.py | 67 - cloudstate/evensourced_servicer.py | 125 -- cloudstate/event_sourced_context.py | 43 - cloudstate/event_sourced_entity.py | 150 --- cloudstate/event_sourced_pb2.py | 403 ------ cloudstate/event_sourced_pb2_grpc.py | 55 - cloudstate/tests/test_cloudstate.py | 12 - cloudstate/version.py | 6 - docs/README.md | 23 - docs/build.sbt | 10 - docs/project/build.properties | 1 - docs/project/plugins.sbt | 2 - docs/src/main/paradox/gettingstarted.md | 11 - docs/src/main/paradox/index.md | 7 - .../shoppingcart/persistence/domain.proto | 25 - .../example/shoppingcart/shoppingcart.proto | 58 - protobuf/frontend/cloudstate/entity_key.proto | 29 - .../frontend/google/api/annotations.proto | 31 - protobuf/frontend/google/api/http.proto | 376 ------ protobuf/protocol/cloudstate/crdt.proto | 379 ------ protobuf/protocol/cloudstate/entity.proto | 190 --- .../protocol/cloudstate/event_sourced.proto | 114 -- protobuf/protocol/cloudstate/function.proto | 59 - .../grpc/reflection/v1alpha/reflection.proto | 136 --- .../grpc/reflection/v1alpha/reflection_pb2.py | 498 -------- .../reflection/v1alpha/reflection_pb2_grpc.py | 47 - requirements.txt | 8 - scripts/compile-protbuf.sh | 16 - scripts/fetch-cloudstate-pb.sh | 33 - setup.cfg | 33 - setup.py | 23 - shoppingcart/Dockerfile | 9 - shoppingcart/domain_pb2.py | 201 --- shoppingcart/requirements.txt | 6 - shoppingcart/shopping_cart.py | 13 - shoppingcart/shopping_cart_entity.py | 102 -- 
shoppingcart/shoppingcart_pb2.py | 319 ----- shoppingcart/shoppingcart_pb2_grpc.py | 81 -- tck.sh | 24 - tck/build_tck_docker_image.sh | 3 - tck/run_tck.sh | 32 - .../google/protobuf/descriptor.py | 1077 ----------------- 52 files changed, 6287 deletions(-) delete mode 100644 Description.md delete mode 100644 LICENSE delete mode 100644 README.md delete mode 100644 cloudstate/__init__.py delete mode 100644 cloudstate/cloudstate.py delete mode 100644 cloudstate/contexts.py delete mode 100755 cloudstate/discovery_servicer.py delete mode 100644 cloudstate/entity_key_pb2.py delete mode 100644 cloudstate/entity_key_pb2_grpc.py delete mode 100644 cloudstate/entity_pb2.py delete mode 100644 cloudstate/entity_pb2_grpc.py delete mode 100644 cloudstate/evensourced_servicer.py delete mode 100644 cloudstate/event_sourced_context.py delete mode 100644 cloudstate/event_sourced_entity.py delete mode 100644 cloudstate/event_sourced_pb2.py delete mode 100644 cloudstate/event_sourced_pb2_grpc.py delete mode 100644 cloudstate/tests/test_cloudstate.py delete mode 100644 cloudstate/version.py delete mode 100644 docs/README.md delete mode 100644 docs/build.sbt delete mode 100644 docs/project/build.properties delete mode 100644 docs/project/plugins.sbt delete mode 100644 docs/src/main/paradox/gettingstarted.md delete mode 100644 docs/src/main/paradox/index.md delete mode 100644 protobuf/example/shoppingcart/persistence/domain.proto delete mode 100644 protobuf/example/shoppingcart/shoppingcart.proto delete mode 100644 protobuf/frontend/cloudstate/entity_key.proto delete mode 100644 protobuf/frontend/google/api/annotations.proto delete mode 100644 protobuf/frontend/google/api/http.proto delete mode 100644 protobuf/protocol/cloudstate/crdt.proto delete mode 100644 protobuf/protocol/cloudstate/entity.proto delete mode 100644 protobuf/protocol/cloudstate/event_sourced.proto delete mode 100644 protobuf/protocol/cloudstate/function.proto delete mode 100644 
protobuf/proxy/grpc/reflection/v1alpha/reflection.proto delete mode 100644 proxy/grpc/reflection/v1alpha/reflection_pb2.py delete mode 100644 proxy/grpc/reflection/v1alpha/reflection_pb2_grpc.py delete mode 100644 requirements.txt delete mode 100644 scripts/compile-protbuf.sh delete mode 100644 scripts/fetch-cloudstate-pb.sh delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 shoppingcart/Dockerfile delete mode 100644 shoppingcart/domain_pb2.py delete mode 100644 shoppingcart/requirements.txt delete mode 100644 shoppingcart/shopping_cart.py delete mode 100644 shoppingcart/shopping_cart_entity.py delete mode 100644 shoppingcart/shoppingcart_pb2.py delete mode 100644 shoppingcart/shoppingcart_pb2_grpc.py delete mode 100755 tck.sh delete mode 100755 tck/build_tck_docker_image.sh delete mode 100755 tck/run_tck.sh delete mode 100644 venv/lib/python3.7/site-packages/google/protobuf/descriptor.py diff --git a/Description.md b/Description.md deleted file mode 100644 index 1a6357d..0000000 --- a/Description.md +++ /dev/null @@ -1,235 +0,0 @@ - -Cloudstate is a specification, protocol, and reference implementation for providing distributed state management patterns suitable for **Serverless** computing. -The current supported and envisioned patterns include: - -* **Event Sourcing** -* **Conflict-Free Replicated Data Types (CRDTs)** -* **Key-Value storage** -* **P2P messaging** -* **CQRS read side projections** - -Cloudstate is polyglot, which means that services can be written in any language that supports gRPC, -and with language specific libraries provided that allow idiomatic use of the patterns in each language. -Cloudstate can be used either by itself, in combination with a Service Mesh, -or it is envisioned that it will be integrated with other Serverless technologies such as [Knative](https://knative.dev/). 
- -Read more about the design, architecture, techniques, and technologies behind Cloudstate in [this section in the documentation](https://github.com/cloudstateio/cloudstate/blob/master/README.md#enter-cloudstate). - -The Cloudstate Python user language support is a library that implements the Cloudstate protocol and offers an pythonistic API -for writing entities that implement the types supported by the Cloudstate protocol. - -The Cloudstate documentation can be found [here](https://cloudstate.io/docs/) - -## Install and update using pip: - -``` -pip install -U cloudstate -``` - -## A Simple EventSourced Example: - -### 1. Define your gRPC contract - -``` -// This is the public API offered by the shopping cart entity. -syntax = "proto3"; - -import "google/protobuf/empty.proto"; -import "cloudstate/entity_key.proto"; -import "google/api/annotations.proto"; -import "google/api/http.proto"; - -package com.example.shoppingcart; - -message AddLineItem { - string user_id = 1 [(.cloudstate.entity_key) = true]; - string product_id = 2; - string name = 3; - int32 quantity = 4; -} - -message RemoveLineItem { - string user_id = 1 [(.cloudstate.entity_key) = true]; - string product_id = 2; -} - -message GetShoppingCart { - string user_id = 1 [(.cloudstate.entity_key) = true]; -} - -message LineItem { - string product_id = 1; - string name = 2; - int32 quantity = 3; -} - -message Cart { - repeated LineItem items = 1; -} - -service ShoppingCart { - rpc AddItem(AddLineItem) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/cart/{user_id}/items/add", - body: "*", - }; - } - - rpc RemoveItem(RemoveLineItem) returns (google.protobuf.Empty) { - option (google.api.http).post = "/cart/{user_id}/items/{product_id}/remove"; - } - - rpc GetCart(GetShoppingCart) returns (Cart) { - option (google.api.http) = { - get: "/carts/{user_id}", - additional_bindings: { - get: "/carts/{user_id}/items", - response_body: "items" - } - }; - } -} - -``` - -### 2. 
Generate Python files - -It is necessary to compile your .proto files using the protoc compiler in order to generate Python files. -See [this official gRPC for Python quickstart](https://grpc.io/docs/languages/python/quickstart/) if you are not familiar with the gRPC protocol. - -Here is an example of how to compile the sample proto file: -``` -python -m grpc_tools.protoc -I../../protos --python_out=. --grpc_python_out=. ../../protos/shoppingcart.proto -``` - -### 3. Implement your business logic under an EventSourced Cloudstate Entity - -``` -from dataclasses import dataclass, field -from typing import MutableMapping - -from google.protobuf.empty_pb2 import Empty - -from cloudstate.event_sourced_context import EventSourcedCommandContext -from cloudstate.event_sourced_entity import EventSourcedEntity -from shoppingcart.domain_pb2 import (Cart as DomainCart, LineItem as DomainLineItem, ItemAdded, ItemRemoved) -from shoppingcart.shoppingcart_pb2 import (Cart, LineItem, AddLineItem, RemoveLineItem) -from shoppingcart.shoppingcart_pb2 import (_SHOPPINGCART, DESCRIPTOR as FILE_DESCRIPTOR) - - -@dataclass -class ShoppingCartState: - entity_id: str - cart: MutableMapping[str, LineItem] = field(default_factory=dict) - - -def init(entity_id: str) -> ShoppingCartState: - return ShoppingCartState(entity_id) - - -entity = EventSourcedEntity(_SHOPPINGCART, [FILE_DESCRIPTOR], init) - - -def to_domain_line_item(item): - domain_item = DomainLineItem() - domain_item.productId = item.product_id - domain_item.name = item.name - domain_item.quantity = item.quantity - return domain_item - - -@entity.snapshot() -def snapshot(state: ShoppingCartState): - cart = DomainCart() - cart.items = [to_domain_line_item(item) for item in state.cart.values()] - return cart - - -def to_line_item(domain_item): - item = LineItem() - item.product_id = domain_item.productId - item.name = domain_item.name - item.quantity = domain_item.quantity - return item - - -@entity.snapshot_handler() -def 
handle_snapshot(state: ShoppingCartState, domain_cart: DomainCart): - state.cart = {domain_item.productId: to_line_item(domain_item) for domain_item in domain_cart.items} - - -@entity.event_handler(ItemAdded) -def item_added(state: ShoppingCartState, event: ItemAdded): - cart = state.cart - if event.item.productId in cart: - item = cart[event.item.productId] - item.quantity = item.quantity + event.item.quantity - else: - item = to_line_item(event.item) - cart[item.product_id] = item - - -@entity.event_handler(ItemRemoved) -def item_removed(state: ShoppingCartState, event: ItemRemoved): - del state.cart[event.productId] - - -@entity.command_handler("GetCart") -def get_cart(state: ShoppingCartState): - cart = Cart() - cart.items.extend(state.cart.values()) - return cart - - -@entity.command_handler("AddItem") -def add_item(item: AddLineItem, ctx: EventSourcedCommandContext): - if item.quantity <= 0: - ctx.fail("Cannot add negative quantity of to item {}".format(item.productId)) - else: - item_added_event = ItemAdded() - item_added_event.item.CopyFrom(to_domain_line_item(item)) - ctx.emit(item_added_event) - return Empty() - - -@entity.command_handler("RemoveItem") -def remove_item(state: ShoppingCartState, item: RemoveLineItem, ctx: EventSourcedCommandContext): - cart = state.cart - if item.product_id not in cart: - ctx.fail("Cannot remove item {} because it is not in the cart.".format(item.productId)) - else: - item_removed_event = ItemRemoved() - item_removed_event.productId = item.product_id - ctx.emit(item_removed_event) - return Empty() -``` - -### 4. Register Entity - -``` -from cloudstate.cloudstate import CloudState -from shoppingcart.shopping_cart_entity import entity as shopping_cart_entity -import logging - -if __name__ == '__main__': - logging.basicConfig() - CloudState().register_event_sourced_entity(shopping_cart_entity).start() -``` - -### 5. 
Deployment - -Cloudstate runs on Docker and Kubernetes you need to package your application so that it works as a Docker container -and can deploy it together with Cloudstate Operator on Kubernetes, the details and examples of all of which can be found [here](https://code.visualstudio.com/docs/containers/quickstart-python), [here](https://github.com/cloudstateio/python-support/blob/master/shoppingcart/Dockerfile) and [here](https://cloudstate.io/docs/core/current/user/deployment/index.html). - -## Contributing - -For guidance on setting up a development environment and how to make a contribution to Cloudstate, -see the contributing [project page](https://github.com/cloudstateio/python-support) or consult an official documentation [here](https://cloudstate.io/docs/). - -## Links - -* [Website](https://cloudstate.io/) -* [Documentation](https://cloudstate.io/docs/) -* [Releases](https://pypi.org/project/cloudstate/) -* [Code](https://github.com/cloudstateio/python-support) -* [Issue tracker](https://github.com/cloudstateio/python-support/issues) diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 261eeb9..0000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/README.md b/README.md deleted file mode 100644 index c51f161..0000000 --- a/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Python User Language Support -Python User Language Support for [Cloudstate](https://github.com/cloudstateio/cloudstate). - -## Installation via source - -``` -> git clone https://github.com/cloudstateio/python-support.git -Cloning into 'python-support'... - -> cd python-support -> python3 -m venv ./venv -> source ./venv/bin/activate -> python --version -Python 3.7.3 -> pip --version -> pip install wheel -> pip install . -``` - -### generate installer -``` -python setup.py bdist_wheel -``` - -### local install -``` -python -m pip install dist/cloudstate-0.1.0-py3-none-any.whl -``` \ No newline at end of file diff --git a/cloudstate/__init__.py b/cloudstate/__init__.py deleted file mode 100644 index 1f9c3b8..0000000 --- a/cloudstate/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -from .version import __version__ - -__all__ = [ - '__version__', -] diff --git a/cloudstate/cloudstate.py b/cloudstate/cloudstate.py deleted file mode 100644 index c20bcb3..0000000 --- a/cloudstate/cloudstate.py +++ /dev/null @@ -1,77 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. 
-""" -from typing import Optional - -from dataclasses import (dataclass, field) -from typing import List -import os - -from concurrent import futures -import grpc - -from cloudstate.evensourced_servicer import CloudStateEventSourcedServicer -from cloudstate.event_sourced_entity import EventSourcedEntity -from cloudstate.discovery_servicer import CloudStateEntityDiscoveryServicer -from cloudstate.entity_pb2_grpc import add_EntityDiscoveryServicer_to_server - -import logging -import multiprocessing - -from cloudstate.event_sourced_pb2_grpc import add_EventSourcedServicer_to_server - -@dataclass -class CloudState: - logging.basicConfig(format='%(asctime)s - %(filename)s - %(levelname)s: %(message)s', level=logging.INFO) - logging.root.setLevel(logging.NOTSET) - - __address: str = '' - __host = '127.0.0.1' - __port = '8080' - __workers = multiprocessing.cpu_count() - __event_sourced_entities: List[EventSourcedEntity] = field(default_factory=list) - - def host(self, address: str): - """Set the address of the network Host. - Default Address is 127.0.0.1. - """ - self.__host = address - return self - - def port(self, port: str): - """Set the address of the network Port. - Default Port is 8080. - """ - self.__port = port - return self - - def max_workers(self, workers: Optional[int] = multiprocessing.cpu_count()): - """Set the gRPC Server number of Workers. - Default is equal than number of CPU Cores in the machine. 
- """ - self.__workers = workers - return self - - def register_event_sourced_entity(self, entity: EventSourcedEntity): - """Registry the user EventSourced entity.""" - self.__event_sourced_entities.append(entity) - return self - - def start(self): - """Start the user function and gRPC Server.""" - - self.__address = '{}:{}'.format(os.environ.get('HOST', self.__host), os.environ.get('PORT', self.__port)) - - server = grpc.server(futures.ThreadPoolExecutor(max_workers=self.__workers)) - add_EntityDiscoveryServicer_to_server(CloudStateEntityDiscoveryServicer(self.__event_sourced_entities), server) - add_EventSourcedServicer_to_server(CloudStateEventSourcedServicer(self.__event_sourced_entities), server) - - logging.info('Starting Cloudstate on address %s', self.__address) - try: - server.add_insecure_port(self.__address) - server.start() - except IOError as e: - logging.error('Error on start Cloudstate %s', e.__cause__) - - server.wait_for_termination() diff --git a/cloudstate/contexts.py b/cloudstate/contexts.py deleted file mode 100644 index 6860216..0000000 --- a/cloudstate/contexts.py +++ /dev/null @@ -1,54 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -from dataclasses import dataclass, field -from typing import List - -from cloudstate.entity_pb2 import ClientAction, Failure, Reply, Forward, SideEffect - - -class Context: - """Root class of all contexts.""" - pass - - -class ClientActionContext(Context): - """Context that provides client actions, which include failing and forwarding. 
- These contexts are typically made available in response to commands.""" - - def __init__(self,command_id: int): - self.command_id: int = command_id - self.errors: List[str] = [] - self.effects:List[SideEffect] = [] - self.forward: Forward = None - - def fail(self, error_message: str): - """Fail the command with the given message""" - self.errors.append(error_message) - - def has_errors(self): - return len(self.errors) > 0 - - def create_client_action(self, result, allow_reply): - client_action = ClientAction() - if self.has_errors(): - failure = Failure() - failure.command_id = self.command_id - failure.description = str(self.errors) - client_action.failure.CopyFrom(failure) - elif result: - if self.forward: - raise Exception("Both a reply was returned, and a forward message was sent, choose one or the other.") - else: - reply = Reply() - reply.payload.Pack(result) - client_action.reply.CopyFrom(reply) - elif self.forward: - client_action.forward.CopyFrom(self.forward) - elif allow_reply: - return None - else: - raise Exception("No reply or forward returned by command handler!") - return client_action diff --git a/cloudstate/discovery_servicer.py b/cloudstate/discovery_servicer.py deleted file mode 100755 index ac3e9e7..0000000 --- a/cloudstate/discovery_servicer.py +++ /dev/null @@ -1,65 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. 
-""" - -import platform -from dataclasses import dataclass -from pprint import pprint -from typing import List - -from google.protobuf.descriptor_pb2 import FileDescriptorSet, FileDescriptorProto -from google.protobuf.descriptor_pool import Default - -from cloudstate import entity_pb2 -from cloudstate.entity_pb2_grpc import EntityDiscoveryServicer -from cloudstate.event_sourced_entity import EventSourcedEntity - - -@dataclass -class CloudStateEntityDiscoveryServicer(EntityDiscoveryServicer): - event_sourced_entities: List[EventSourcedEntity] - - def discover(self, request, context): - pprint(request) - descriptor_set = FileDescriptorSet() - for entity in self.event_sourced_entities: - for descriptor in entity.file_descriptors: - descriptor_set.file.append(FileDescriptorProto.FromString(descriptor.serialized_pb)) - descriptor_set.file.append( - FileDescriptorProto.FromString(Default().FindFileByName('google/protobuf/empty.proto').serialized_pb) - ) - descriptor_set.file.append( - FileDescriptorProto.FromString(Default().FindFileByName('cloudstate/entity_key.proto').serialized_pb) - ) - descriptor_set.file.append( - FileDescriptorProto.FromString(Default().FindFileByName('google/protobuf/descriptor.proto').serialized_pb) - ) - descriptor_set.file.append( - FileDescriptorProto.FromString(Default().FindFileByName('google/api/annotations.proto').serialized_pb) - ) - descriptor_set.file.append( - FileDescriptorProto.FromString(Default().FindFileByName('google/api/http.proto').serialized_pb) - ) - spec = entity_pb2.EntitySpec( - service_info=entity_pb2.ServiceInfo( - service_version='0.1.0', - service_runtime='Python ' + platform.python_version() + ' [' + platform.python_implementation() + ' ' + - platform.python_compiler() + ']', - support_library_name='cloudstate-python-support', - support_library_version='0.1.0' - ), - entities=[ - entity_pb2.Entity( - entity_type=entity.entity_type(), - service_name=entity.service_descriptor.full_name, - 
persistence_id=entity.persistence_id, - ) - for entity in self.event_sourced_entities], - proto=descriptor_set.SerializeToString() - ) - return spec - - def reportError(self, request, context): - pprint(request) - return diff --git a/cloudstate/entity_key_pb2.py b/cloudstate/entity_key_pb2.py deleted file mode 100644 index 051e930..0000000 --- a/cloudstate/entity_key_pb2.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: cloudstate/entity_key.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='cloudstate/entity_key.proto', - package='cloudstate', - syntax='proto3', - serialized_options=_b('\n\rio.cloudstate'), - serialized_pb=_b('\n\x1b\x63loudstate/entity_key.proto\x12\ncloudstate\x1a google/protobuf/descriptor.proto:3\n\nentity_key\x12\x1d.google.protobuf.FieldOptions\x18\xd2\x86\x03 \x01(\x08\x42\x0f\n\rio.cloudstateb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - -ENTITY_KEY_FIELD_NUMBER = 50002 -entity_key = _descriptor.FieldDescriptor( - name='entity_key', full_name='cloudstate.entity_key', index=0, - number=50002, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=True, extension_scope=None, - serialized_options=None, file=DESCRIPTOR) - -DESCRIPTOR.extensions_by_name['entity_key'] = entity_key -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - 
-google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(entity_key) - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/cloudstate/entity_key_pb2_grpc.py b/cloudstate/entity_key_pb2_grpc.py deleted file mode 100644 index a894352..0000000 --- a/cloudstate/entity_key_pb2_grpc.py +++ /dev/null @@ -1,3 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - diff --git a/cloudstate/entity_pb2.py b/cloudstate/entity_pb2.py deleted file mode 100644 index 6a7e7ee..0000000 --- a/cloudstate/entity_pb2.py +++ /dev/null @@ -1,733 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: cloudstate/entity.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='cloudstate/entity.proto', - package='cloudstate', - syntax='proto3', - serialized_options=_b('\n\026io.cloudstate.protocol'), - serialized_pb=_b('\n\x17\x63loudstate/entity.proto\x12\ncloudstate\x1a\x19google/protobuf/any.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/descriptor.proto\".\n\x05Reply\x12%\n\x07payload\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\"\\\n\x07\x46orward\x12\x14\n\x0cservice_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ommand_name\x18\x02 \x01(\t\x12%\n\x07payload\x18\x03 
\x01(\x0b\x32\x14.google.protobuf.Any\"\x8c\x01\n\x0c\x43lientAction\x12\"\n\x05reply\x18\x01 \x01(\x0b\x32\x11.cloudstate.ReplyH\x00\x12&\n\x07\x66orward\x18\x02 \x01(\x0b\x32\x13.cloudstate.ForwardH\x00\x12&\n\x07\x66\x61ilure\x18\x03 \x01(\x0b\x32\x13.cloudstate.FailureH\x00\x42\x08\n\x06\x61\x63tion\"t\n\nSideEffect\x12\x14\n\x0cservice_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ommand_name\x18\x02 \x01(\t\x12%\n\x07payload\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x13\n\x0bsynchronous\x18\x04 \x01(\x08\"o\n\x07\x43ommand\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\x03\x12\x0c\n\x04name\x18\x03 \x01(\t\x12%\n\x07payload\x18\x04 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x10\n\x08streamed\x18\x05 \x01(\x08\"0\n\x0fStreamCancelled\x12\x11\n\tentity_id\x18\x01 \x01(\t\x12\n\n\x02id\x18\x02 \x01(\x03\"2\n\x07\x46\x61ilure\x12\x12\n\ncommand_id\x18\x01 \x01(\x03\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\"p\n\nEntitySpec\x12\r\n\x05proto\x18\x01 \x01(\x0c\x12$\n\x08\x65ntities\x18\x02 \x03(\x0b\x32\x12.cloudstate.Entity\x12-\n\x0cservice_info\x18\x03 \x01(\x0b\x32\x17.cloudstate.ServiceInfo\"\x94\x01\n\x0bServiceInfo\x12\x14\n\x0cservice_name\x18\x01 \x01(\t\x12\x17\n\x0fservice_version\x18\x02 \x01(\t\x12\x17\n\x0fservice_runtime\x18\x03 \x01(\t\x12\x1c\n\x14support_library_name\x18\x04 \x01(\t\x12\x1f\n\x17support_library_version\x18\x05 \x01(\t\"K\n\x06\x45ntity\x12\x13\n\x0b\x65ntity_type\x18\x01 \x01(\t\x12\x14\n\x0cservice_name\x18\x02 \x01(\t\x12\x16\n\x0epersistence_id\x18\x03 \x01(\t\"$\n\x11UserFunctionError\x12\x0f\n\x07message\x18\x01 \x01(\t\"\x96\x01\n\tProxyInfo\x12\x1e\n\x16protocol_major_version\x18\x01 \x01(\x05\x12\x1e\n\x16protocol_minor_version\x18\x02 \x01(\x05\x12\x12\n\nproxy_name\x18\x03 \x01(\t\x12\x15\n\rproxy_version\x18\x04 \x01(\t\x12\x1e\n\x16supported_entity_types\x18\x05 
\x03(\t2\x96\x01\n\x0f\x45ntityDiscovery\x12;\n\x08\x64iscover\x12\x15.cloudstate.ProxyInfo\x1a\x16.cloudstate.EntitySpec\"\x00\x12\x46\n\x0breportError\x12\x1d.cloudstate.UserFunctionError\x1a\x16.google.protobuf.Empty\"\x00\x42\x18\n\x16io.cloudstate.protocolb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,]) - - - - -_REPLY = _descriptor.Descriptor( - name='Reply', - full_name='cloudstate.Reply', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='payload', full_name='cloudstate.Reply.payload', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=129, - serialized_end=175, -) - - -_FORWARD = _descriptor.Descriptor( - name='Forward', - full_name='cloudstate.Forward', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='service_name', full_name='cloudstate.Forward.service_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='command_name', full_name='cloudstate.Forward.command_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='payload', full_name='cloudstate.Forward.payload', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=177, - serialized_end=269, -) - - -_CLIENTACTION = _descriptor.Descriptor( - name='ClientAction', - full_name='cloudstate.ClientAction', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='reply', full_name='cloudstate.ClientAction.reply', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='forward', full_name='cloudstate.ClientAction.forward', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='failure', full_name='cloudstate.ClientAction.failure', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - 
name='action', full_name='cloudstate.ClientAction.action', - index=0, containing_type=None, fields=[]), - ], - serialized_start=272, - serialized_end=412, -) - - -_SIDEEFFECT = _descriptor.Descriptor( - name='SideEffect', - full_name='cloudstate.SideEffect', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='service_name', full_name='cloudstate.SideEffect.service_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='command_name', full_name='cloudstate.SideEffect.command_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='payload', full_name='cloudstate.SideEffect.payload', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='synchronous', full_name='cloudstate.SideEffect.synchronous', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=414, - serialized_end=530, -) - - -_COMMAND = 
_descriptor.Descriptor( - name='Command', - full_name='cloudstate.Command', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entity_id', full_name='cloudstate.Command.entity_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='id', full_name='cloudstate.Command.id', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='name', full_name='cloudstate.Command.name', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='payload', full_name='cloudstate.Command.payload', index=3, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='streamed', full_name='cloudstate.Command.streamed', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], 
- oneofs=[ - ], - serialized_start=532, - serialized_end=643, -) - - -_STREAMCANCELLED = _descriptor.Descriptor( - name='StreamCancelled', - full_name='cloudstate.StreamCancelled', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entity_id', full_name='cloudstate.StreamCancelled.entity_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='id', full_name='cloudstate.StreamCancelled.id', index=1, - number=2, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=645, - serialized_end=693, -) - - -_FAILURE = _descriptor.Descriptor( - name='Failure', - full_name='cloudstate.Failure', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='command_id', full_name='cloudstate.Failure.command_id', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='description', full_name='cloudstate.Failure.description', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - 
serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=695, - serialized_end=745, -) - - -_ENTITYSPEC = _descriptor.Descriptor( - name='EntitySpec', - full_name='cloudstate.EntitySpec', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='proto', full_name='cloudstate.EntitySpec.proto', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='entities', full_name='cloudstate.EntitySpec.entities', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='service_info', full_name='cloudstate.EntitySpec.service_info', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=747, - serialized_end=859, -) - - -_SERVICEINFO = _descriptor.Descriptor( - name='ServiceInfo', - full_name='cloudstate.ServiceInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='service_name', full_name='cloudstate.ServiceInfo.service_name', index=0, - number=1, type=9, cpp_type=9, 
label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='service_version', full_name='cloudstate.ServiceInfo.service_version', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='service_runtime', full_name='cloudstate.ServiceInfo.service_runtime', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='support_library_name', full_name='cloudstate.ServiceInfo.support_library_name', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='support_library_version', full_name='cloudstate.ServiceInfo.support_library_version', index=4, - number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=862, - serialized_end=1010, -) - - -_ENTITY = _descriptor.Descriptor( - 
name='Entity', - full_name='cloudstate.Entity', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='entity_type', full_name='cloudstate.Entity.entity_type', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='service_name', full_name='cloudstate.Entity.service_name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='persistence_id', full_name='cloudstate.Entity.persistence_id', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1012, - serialized_end=1087, -) - - -_USERFUNCTIONERROR = _descriptor.Descriptor( - name='UserFunctionError', - full_name='cloudstate.UserFunctionError', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='message', full_name='cloudstate.UserFunctionError.message', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - 
extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1089, - serialized_end=1125, -) - - -_PROXYINFO = _descriptor.Descriptor( - name='ProxyInfo', - full_name='cloudstate.ProxyInfo', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='protocol_major_version', full_name='cloudstate.ProxyInfo.protocol_major_version', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='protocol_minor_version', full_name='cloudstate.ProxyInfo.protocol_minor_version', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='proxy_name', full_name='cloudstate.ProxyInfo.proxy_name', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='proxy_version', full_name='cloudstate.ProxyInfo.proxy_version', index=3, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='supported_entity_types', full_name='cloudstate.ProxyInfo.supported_entity_types', index=4, - number=5, type=9, cpp_type=9, 
label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1128, - serialized_end=1278, -) - -_REPLY.fields_by_name['payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_FORWARD.fields_by_name['payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_CLIENTACTION.fields_by_name['reply'].message_type = _REPLY -_CLIENTACTION.fields_by_name['forward'].message_type = _FORWARD -_CLIENTACTION.fields_by_name['failure'].message_type = _FAILURE -_CLIENTACTION.oneofs_by_name['action'].fields.append( - _CLIENTACTION.fields_by_name['reply']) -_CLIENTACTION.fields_by_name['reply'].containing_oneof = _CLIENTACTION.oneofs_by_name['action'] -_CLIENTACTION.oneofs_by_name['action'].fields.append( - _CLIENTACTION.fields_by_name['forward']) -_CLIENTACTION.fields_by_name['forward'].containing_oneof = _CLIENTACTION.oneofs_by_name['action'] -_CLIENTACTION.oneofs_by_name['action'].fields.append( - _CLIENTACTION.fields_by_name['failure']) -_CLIENTACTION.fields_by_name['failure'].containing_oneof = _CLIENTACTION.oneofs_by_name['action'] -_SIDEEFFECT.fields_by_name['payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_COMMAND.fields_by_name['payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_ENTITYSPEC.fields_by_name['entities'].message_type = _ENTITY -_ENTITYSPEC.fields_by_name['service_info'].message_type = _SERVICEINFO -DESCRIPTOR.message_types_by_name['Reply'] = _REPLY -DESCRIPTOR.message_types_by_name['Forward'] = _FORWARD -DESCRIPTOR.message_types_by_name['ClientAction'] = _CLIENTACTION -DESCRIPTOR.message_types_by_name['SideEffect'] = _SIDEEFFECT -DESCRIPTOR.message_types_by_name['Command'] = _COMMAND 
-DESCRIPTOR.message_types_by_name['StreamCancelled'] = _STREAMCANCELLED -DESCRIPTOR.message_types_by_name['Failure'] = _FAILURE -DESCRIPTOR.message_types_by_name['EntitySpec'] = _ENTITYSPEC -DESCRIPTOR.message_types_by_name['ServiceInfo'] = _SERVICEINFO -DESCRIPTOR.message_types_by_name['Entity'] = _ENTITY -DESCRIPTOR.message_types_by_name['UserFunctionError'] = _USERFUNCTIONERROR -DESCRIPTOR.message_types_by_name['ProxyInfo'] = _PROXYINFO -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Reply = _reflection.GeneratedProtocolMessageType('Reply', (_message.Message,), { - 'DESCRIPTOR' : _REPLY, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.Reply) - }) -_sym_db.RegisterMessage(Reply) - -Forward = _reflection.GeneratedProtocolMessageType('Forward', (_message.Message,), { - 'DESCRIPTOR' : _FORWARD, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.Forward) - }) -_sym_db.RegisterMessage(Forward) - -ClientAction = _reflection.GeneratedProtocolMessageType('ClientAction', (_message.Message,), { - 'DESCRIPTOR' : _CLIENTACTION, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.ClientAction) - }) -_sym_db.RegisterMessage(ClientAction) - -SideEffect = _reflection.GeneratedProtocolMessageType('SideEffect', (_message.Message,), { - 'DESCRIPTOR' : _SIDEEFFECT, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.SideEffect) - }) -_sym_db.RegisterMessage(SideEffect) - -Command = _reflection.GeneratedProtocolMessageType('Command', (_message.Message,), { - 'DESCRIPTOR' : _COMMAND, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.Command) - }) -_sym_db.RegisterMessage(Command) - -StreamCancelled = _reflection.GeneratedProtocolMessageType('StreamCancelled', (_message.Message,), { - 'DESCRIPTOR' : _STREAMCANCELLED, - '__module__' : 'cloudstate.entity_pb2' - # 
@@protoc_insertion_point(class_scope:cloudstate.StreamCancelled) - }) -_sym_db.RegisterMessage(StreamCancelled) - -Failure = _reflection.GeneratedProtocolMessageType('Failure', (_message.Message,), { - 'DESCRIPTOR' : _FAILURE, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.Failure) - }) -_sym_db.RegisterMessage(Failure) - -EntitySpec = _reflection.GeneratedProtocolMessageType('EntitySpec', (_message.Message,), { - 'DESCRIPTOR' : _ENTITYSPEC, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.EntitySpec) - }) -_sym_db.RegisterMessage(EntitySpec) - -ServiceInfo = _reflection.GeneratedProtocolMessageType('ServiceInfo', (_message.Message,), { - 'DESCRIPTOR' : _SERVICEINFO, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.ServiceInfo) - }) -_sym_db.RegisterMessage(ServiceInfo) - -Entity = _reflection.GeneratedProtocolMessageType('Entity', (_message.Message,), { - 'DESCRIPTOR' : _ENTITY, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.Entity) - }) -_sym_db.RegisterMessage(Entity) - -UserFunctionError = _reflection.GeneratedProtocolMessageType('UserFunctionError', (_message.Message,), { - 'DESCRIPTOR' : _USERFUNCTIONERROR, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.UserFunctionError) - }) -_sym_db.RegisterMessage(UserFunctionError) - -ProxyInfo = _reflection.GeneratedProtocolMessageType('ProxyInfo', (_message.Message,), { - 'DESCRIPTOR' : _PROXYINFO, - '__module__' : 'cloudstate.entity_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.ProxyInfo) - }) -_sym_db.RegisterMessage(ProxyInfo) - - -DESCRIPTOR._options = None - -_ENTITYDISCOVERY = _descriptor.ServiceDescriptor( - name='EntityDiscovery', - full_name='cloudstate.EntityDiscovery', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=1281, - serialized_end=1431, - 
methods=[ - _descriptor.MethodDescriptor( - name='discover', - full_name='cloudstate.EntityDiscovery.discover', - index=0, - containing_service=None, - input_type=_PROXYINFO, - output_type=_ENTITYSPEC, - serialized_options=None, - ), - _descriptor.MethodDescriptor( - name='reportError', - full_name='cloudstate.EntityDiscovery.reportError', - index=1, - containing_service=None, - input_type=_USERFUNCTIONERROR, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_ENTITYDISCOVERY) - -DESCRIPTOR.services_by_name['EntityDiscovery'] = _ENTITYDISCOVERY - -# @@protoc_insertion_point(module_scope) diff --git a/cloudstate/entity_pb2_grpc.py b/cloudstate/entity_pb2_grpc.py deleted file mode 100644 index e78c925..0000000 --- a/cloudstate/entity_pb2_grpc.py +++ /dev/null @@ -1,67 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from cloudstate import entity_pb2 as cloudstate_dot_entity__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class EntityDiscoveryStub(object): - """Entity discovery service. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.discover = channel.unary_unary( - '/cloudstate.EntityDiscovery/discover', - request_serializer=cloudstate_dot_entity__pb2.ProxyInfo.SerializeToString, - response_deserializer=cloudstate_dot_entity__pb2.EntitySpec.FromString, - ) - self.reportError = channel.unary_unary( - '/cloudstate.EntityDiscovery/reportError', - request_serializer=cloudstate_dot_entity__pb2.UserFunctionError.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class EntityDiscoveryServicer(object): - """Entity discovery service. - """ - - def discover(self, request, context): - """Discover what entities the user function wishes to serve. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def reportError(self, request, context): - """Report an error back to the user function. This will only be invoked to tell the user function - that it has done something wrong, eg, violated the protocol, tried to use an entity type that - isn't supported, or attempted to forward to an entity that doesn't exist, etc. These messages - should be logged clearly for debugging purposes. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_EntityDiscoveryServicer_to_server(servicer, server): - rpc_method_handlers = { - 'discover': grpc.unary_unary_rpc_method_handler( - servicer.discover, - request_deserializer=cloudstate_dot_entity__pb2.ProxyInfo.FromString, - response_serializer=cloudstate_dot_entity__pb2.EntitySpec.SerializeToString, - ), - 'reportError': grpc.unary_unary_rpc_method_handler( - servicer.reportError, - request_deserializer=cloudstate_dot_entity__pb2.UserFunctionError.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'cloudstate.EntityDiscovery', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/cloudstate/evensourced_servicer.py b/cloudstate/evensourced_servicer.py deleted file mode 100644 index 6e54104..0000000 --- a/cloudstate/evensourced_servicer.py +++ /dev/null @@ -1,125 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. 
-""" - -import logging -from pprint import pprint -from typing import List - -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.any_pb2 import Any - -from cloudstate.entity_pb2 import Command -from cloudstate.event_sourced_context import SnapshotContext, EventSourcedCommandContext, EventContext -from cloudstate.event_sourced_entity import EventSourcedEntity, EventSourcedHandler -from cloudstate.event_sourced_pb2 import EventSourcedInit, EventSourcedSnapshot, EventSourcedEvent, EventSourcedReply, \ - EventSourcedStreamOut -from cloudstate.event_sourced_pb2_grpc import EventSourcedServicer - -_sym_db = _symbol_database.Default() - -TYPE_URL_PREFIX = 'type.googleapis.com/' - - -def get_payload(command): - command_type: str = command.payload.type_url - if command_type.startswith(TYPE_URL_PREFIX): - command_type = command_type[len(TYPE_URL_PREFIX):] - command_class = _sym_db.GetSymbol(command_type) - cmd = command_class() - cmd.ParseFromString(command.payload.value) - return cmd - - -def pack(event): - any = Any() - any.Pack(event) - return any - - -class CloudStateEventSourcedServicer(EventSourcedServicer): - def __init__(self,event_sourced_entities: List[EventSourcedEntity]): - self.event_sourced_entities = { entity.name():entity for entity in event_sourced_entities} - - def handle(self, request_iterator, context): - initiated = False - current_state = None - handler:EventSourcedHandler = None - entity_id:str = None - start_sequence_number:int =0 - for request in request_iterator: - if not initiated: - if request.HasField("init"): - init:EventSourcedInit = request.init - service_name = init.service_name - entity_id = init.entity_id - if not service_name in self.event_sourced_entities: - raise Exception("No event sourced entity registered for service {}".format(service_name)) - entity = self.event_sourced_entities[service_name] - handler = EventSourcedHandler(entity) - current_state = handler.init_state(entity_id) - initiated = True 
- if init.HasField('snapshot'): - event_sourced_snapshot:EventSourcedSnapshot= init.snapshot - start_sequence_number = event_sourced_snapshot.snapshot_sequence - snapshot = get_payload(event_sourced_snapshot.snapshot) - snapshot_context = SnapshotContext(entity_id,start_sequence_number) - snapshot_result = handler.handle_snapshot(current_state,snapshot,snapshot_context) - if snapshot_result: - current_state = snapshot_result - else: - raise Exception("Cannot handle {} before initialization".format(request)) - - elif request.HasField('event'): - event:EventSourcedEvent = request.event - evt = get_payload(event) - event_result = handler.handle_event(current_state, evt, - EventContext(entity_id, event.sequence)) - start_sequence_number = event.sequence - if event_result: - current_state = event_result - pprint("Handling event {}".format(event)) - elif request.HasField('command'): - command:Command = request.command - cmd = get_payload(command) - ctx = EventSourcedCommandContext(command.name,command.id,entity_id,start_sequence_number) - result = None - try: - result = handler.handle_command(current_state,cmd,ctx) - except Exception as ex: - ctx.fail(str(ex)) - logging.exception('Failed to execute command:'+str(ex)) - - client_action = ctx.create_client_action(result, False) - event_sourced_reply = EventSourcedReply() - event_sourced_reply.command_id = command.id - event_sourced_reply.client_action.CopyFrom(client_action) - snapshot = None - perform_snapshot=False - if not ctx.has_errors(): - for number, event in enumerate(ctx.events): - sequence_number = start_sequence_number+number+1 - event_result = handler.handle_event(current_state,event,EventContext(entity_id,start_sequence_number+number)) - if event_result: - current_state = event_result - snapshot_every=handler.entity.snapshot_every - perform_snapshot = (snapshot_every > 0) and ( - perform_snapshot or (sequence_number % snapshot_every == 0)) - end_sequence_number = start_sequence_number + len(ctx.events) - if 
perform_snapshot: - snapshot = handler.snapshot(current_state,SnapshotContext(entity_id,end_sequence_number)) - - event_sourced_reply.side_effects.extend(ctx.effects) - event_sourced_reply.events.extend([pack(event) for event in ctx.events]) - if snapshot: - event_sourced_reply.snapshot.Pack(snapshot) - - output = EventSourcedStreamOut() - output.reply.CopyFrom(event_sourced_reply) - yield output - - else: - raise Exception("Cannot handle {} after initialization".format(type(request))) - - diff --git a/cloudstate/event_sourced_context.py b/cloudstate/event_sourced_context.py deleted file mode 100644 index 8e7d871..0000000 --- a/cloudstate/event_sourced_context.py +++ /dev/null @@ -1,43 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -from dataclasses import dataclass, field -from typing import List, Any - -from cloudstate.contexts import ClientActionContext -from cloudstate.entity_pb2 import Forward, SideEffect - - -@dataclass -class EventSourcedCommandContext(ClientActionContext): - """An event sourced command context. - Command Handler Methods may take this is a parameter. It allows emitting - new events in response to a command, along with forwarding the result to other entities, and - performing side effects on other entities""" - command_name:str - command_id:int - entity_id:str - sequence:int - events:List[Any]=field(default_factory=list) - errors: List[str] = field(default_factory=list) - effects: List[SideEffect] = field(default_factory=list) - forward: Forward = None - - - def emit(self, event): - """Emit the given event. 
The event will be persisted, and the handler of the event defined in the - current behavior will immediately be executed to pick it up""" - self.events.append(event) - -@dataclass -class SnapshotContext: - entity_id:str - sequence_number: int - - -@dataclass -class EventContext: - entity_id: str - sequence_number:int \ No newline at end of file diff --git a/cloudstate/event_sourced_entity.py b/cloudstate/event_sourced_entity.py deleted file mode 100644 index 37cf697..0000000 --- a/cloudstate/event_sourced_entity.py +++ /dev/null @@ -1,150 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -from dataclasses import dataclass, field -from typing import List, Callable, Any, Mapping, MutableMapping -import inspect - -from google.protobuf import descriptor as _descriptor - -from cloudstate.event_sourced_context import SnapshotContext, EventContext, EventSourcedCommandContext -from cloudstate.event_sourced_pb2 import _EVENTSOURCED - - -@dataclass -class EventSourcedEntity: - service_descriptor: _descriptor.ServiceDescriptor - file_descriptors: List[_descriptor.FileDescriptor] - init_state: Callable[[str], Any] - persistence_id: str = None - snapshot_every: int = 0 - snapshot_function: Callable[[Any], Any] = None - snapshot_handler_function: Callable[[Any, Any], Any] = None - command_handlers: MutableMapping[str, Callable] = field(default_factory=dict) - event_handlers: MutableMapping[type, Callable] = field(default_factory=dict) - - def __post_init__(self): - if not self.persistence_id: - self.persistence_id = self.service_descriptor.full_name - - def entity_type(self): - return _EVENTSOURCED.full_name - - def snapshot(self): - def register_snapshot(function: Callable[[Any], Any]): - """ - Register the function to snapshot the state - """ - if self.snapshot_function: - raise Exception("Snapshot function {} already defined for this entity".format(self.snapshot_function)) - if function.__code__.co_argcount > 2: - raise 
Exception("At most 2 parameters, the current state and the snapshot context, should be accepted by the snapshot function") - self.snapshot_function = function - return function - - return register_snapshot - - def snapshot_handler(self): - def register_snapshot_handler(function): - """ - Register the function to handle snapshots - """ - if self.snapshot_handler_function: - raise Exception("Snapshot handler function {} already defined for this entity".format( - self.snapshot_handler_function)) - if function.__code__.co_argcount > 2: - raise Exception( - "At most two parameters, the current state and the snapshot, should be accepted by the snapshot_handler function") - self.snapshot_handler_function = function - return function - - return register_snapshot_handler - - def command_handler(self, name: str): - def register_command_handler(function): - """ - Register the function to handle commands - """ - if name in self.command_handlers: - raise Exception("Command handler function {} already defined for command {}".format( - self.command_handlers[name], name)) - if function.__code__.co_argcount > 3: - raise Exception( - "At most three parameters, the current state, the command and the context, should be accepted by the command_handler function") - self.command_handlers[name] = function - return function - - return register_command_handler - - def event_handler(self, event_type: type): - def register_event_handler(function): - """ - Register the function to handle events - """ - if event_type in self.event_handlers: - raise Exception("Event handler function {} already defined for type {}".format( - self.event_handlers[event_type], event_type)) - if function.__code__.co_argcount > 2: - raise Exception( - "At most two parameters, the current state and the event, should be accepted by the command_handler function") - self.event_handlers[event_type]=function - return function - - return register_event_handler - - def name(self): - return self.service_descriptor.full_name - 
-def invoke(function,parameters): - ordered_parameters = [] - for parameter_definition in inspect.signature(function).parameters.values(): - annotation = parameter_definition.annotation - if annotation == inspect._empty: - raise Exception("Cannot inject parameter {} of function {}: Missing type annotation".format(parameter_definition.name,function)) - match_found = False - for param in parameters: - if isinstance(param,annotation): - match_found=True - ordered_parameters.append(param) - if not match_found: - raise Exception("Cannot inject parameter {} of function {}: No matching value".format(parameter_definition.name,function)) - return function(*ordered_parameters) - - -@dataclass -class EventSourcedHandler: - entity:EventSourcedEntity - - def init_state(self,entity_id:str): - return self.entity.init_state(entity_id) - - def snapshot(self,current_state,snapshot_context:SnapshotContext): - if not self.entity.snapshot_function: - raise Exception("Missing snapshot function for entity {}".format(self.entity.name())) - return invoke(self.entity.snapshot_function,[current_state,snapshot_context]) - - def handle_snapshot(self,current_state,snapshot,snapshot_context:SnapshotContext): - if not self.entity.snapshot_handler_function: - raise Exception("Missing snapshot handler function for entity {}".format(self.entity.name())) - return invoke(self.entity.snapshot_handler_function,[current_state,snapshot,snapshot_context]) - - def handle_event(self,current_state,event,event_context:EventContext): - event_type = type(event) - handler_function = None - if event_type in self.entity.event_handlers: - handler_function = self.entity.event_handlers[event_type] - else: - for event_type,function in self.entity.event_handlers: - if isinstance(event,event_type): - handler_function = function - if not handler_function: - raise Exception("Missing event handler function for entity {} and event type".format(self.entity.name(),event_type)) - return 
invoke(handler_function,[current_state,event,event_context]) - - def handle_command(self,current_state,command,ctx:EventSourcedCommandContext): - if ctx.command_name not in self.entity.command_handlers: - raise Exception("Missing command handler function for entity {} and command".format(self.entity.name(),ctx.command_name)) - return invoke(self.entity.command_handlers[ctx.command_name],[current_state,command,ctx]) - diff --git a/cloudstate/event_sourced_pb2.py b/cloudstate/event_sourced_pb2.py deleted file mode 100644 index eea9a35..0000000 --- a/cloudstate/event_sourced_pb2.py +++ /dev/null @@ -1,403 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: cloudstate/event_sourced.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 -from cloudstate import entity_pb2 as cloudstate_dot_entity__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='cloudstate/event_sourced.proto', - package='cloudstate.eventsourced', - syntax='proto3', - serialized_options=_b('\n\026io.cloudstate.protocol'), - serialized_pb=_b('\n\x1e\x63loudstate/event_sourced.proto\x12\x17\x63loudstate.eventsourced\x1a\x19google/protobuf/any.proto\x1a\x17\x63loudstate/entity.proto\"|\n\x10\x45ventSourcedInit\x12\x14\n\x0cservice_name\x18\x01 \x01(\t\x12\x11\n\tentity_id\x18\x02 \x01(\t\x12?\n\x08snapshot\x18\x03 \x01(\x0b\x32-.cloudstate.eventsourced.EventSourcedSnapshot\"Y\n\x14\x45ventSourcedSnapshot\x12\x19\n\x11snapshot_sequence\x18\x01 \x01(\x03\x12&\n\x08snapshot\x18\x02 
\x01(\x0b\x32\x14.google.protobuf.Any\"L\n\x11\x45ventSourcedEvent\x12\x10\n\x08sequence\x18\x01 \x01(\x03\x12%\n\x07payload\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\"\xd4\x01\n\x11\x45ventSourcedReply\x12\x12\n\ncommand_id\x18\x01 \x01(\x03\x12/\n\rclient_action\x18\x02 \x01(\x0b\x32\x18.cloudstate.ClientAction\x12,\n\x0cside_effects\x18\x03 \x03(\x0b\x32\x16.cloudstate.SideEffect\x12$\n\x06\x65vents\x18\x04 \x03(\x0b\x32\x14.google.protobuf.Any\x12&\n\x08snapshot\x18\x05 \x01(\x0b\x32\x14.google.protobuf.Any\"\xc1\x01\n\x14\x45ventSourcedStreamIn\x12\x39\n\x04init\x18\x01 \x01(\x0b\x32).cloudstate.eventsourced.EventSourcedInitH\x00\x12;\n\x05\x65vent\x18\x02 \x01(\x0b\x32*.cloudstate.eventsourced.EventSourcedEventH\x00\x12&\n\x07\x63ommand\x18\x03 \x01(\x0b\x32\x13.cloudstate.CommandH\x00\x42\t\n\x07message\"\x87\x01\n\x15\x45ventSourcedStreamOut\x12;\n\x05reply\x18\x01 \x01(\x0b\x32*.cloudstate.eventsourced.EventSourcedReplyH\x00\x12&\n\x07\x66\x61ilure\x18\x02 \x01(\x0b\x32\x13.cloudstate.FailureH\x00\x42\t\n\x07message2}\n\x0c\x45ventSourced\x12m\n\x06handle\x12-.cloudstate.eventsourced.EventSourcedStreamIn\x1a..cloudstate.eventsourced.EventSourcedStreamOut\"\x00(\x01\x30\x01\x42\x18\n\x16io.cloudstate.protocolb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,cloudstate_dot_entity__pb2.DESCRIPTOR,]) - - - - -_EVENTSOURCEDINIT = _descriptor.Descriptor( - name='EventSourcedInit', - full_name='cloudstate.eventsourced.EventSourcedInit', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='service_name', full_name='cloudstate.eventsourced.EventSourcedInit.service_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - 
name='entity_id', full_name='cloudstate.eventsourced.EventSourcedInit.entity_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='snapshot', full_name='cloudstate.eventsourced.EventSourcedInit.snapshot', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=111, - serialized_end=235, -) - - -_EVENTSOURCEDSNAPSHOT = _descriptor.Descriptor( - name='EventSourcedSnapshot', - full_name='cloudstate.eventsourced.EventSourcedSnapshot', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='snapshot_sequence', full_name='cloudstate.eventsourced.EventSourcedSnapshot.snapshot_sequence', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='snapshot', full_name='cloudstate.eventsourced.EventSourcedSnapshot.snapshot', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - 
syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=237, - serialized_end=326, -) - - -_EVENTSOURCEDEVENT = _descriptor.Descriptor( - name='EventSourcedEvent', - full_name='cloudstate.eventsourced.EventSourcedEvent', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='sequence', full_name='cloudstate.eventsourced.EventSourcedEvent.sequence', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='payload', full_name='cloudstate.eventsourced.EventSourcedEvent.payload', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=328, - serialized_end=404, -) - - -_EVENTSOURCEDREPLY = _descriptor.Descriptor( - name='EventSourcedReply', - full_name='cloudstate.eventsourced.EventSourcedReply', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='command_id', full_name='cloudstate.eventsourced.EventSourcedReply.command_id', index=0, - number=1, type=3, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='client_action', full_name='cloudstate.eventsourced.EventSourcedReply.client_action', index=1, - number=2, type=11, cpp_type=10, label=1, - 
has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='side_effects', full_name='cloudstate.eventsourced.EventSourcedReply.side_effects', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='events', full_name='cloudstate.eventsourced.EventSourcedReply.events', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='snapshot', full_name='cloudstate.eventsourced.EventSourcedReply.snapshot', index=4, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=407, - serialized_end=619, -) - - -_EVENTSOURCEDSTREAMIN = _descriptor.Descriptor( - name='EventSourcedStreamIn', - full_name='cloudstate.eventsourced.EventSourcedStreamIn', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='init', full_name='cloudstate.eventsourced.EventSourcedStreamIn.init', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='event', full_name='cloudstate.eventsourced.EventSourcedStreamIn.event', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='command', full_name='cloudstate.eventsourced.EventSourcedStreamIn.command', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='message', full_name='cloudstate.eventsourced.EventSourcedStreamIn.message', - index=0, containing_type=None, fields=[]), - ], - serialized_start=622, - serialized_end=815, -) - - -_EVENTSOURCEDSTREAMOUT = _descriptor.Descriptor( - name='EventSourcedStreamOut', - full_name='cloudstate.eventsourced.EventSourcedStreamOut', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='reply', full_name='cloudstate.eventsourced.EventSourcedStreamOut.reply', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='failure', full_name='cloudstate.eventsourced.EventSourcedStreamOut.failure', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - 
message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='message', full_name='cloudstate.eventsourced.EventSourcedStreamOut.message', - index=0, containing_type=None, fields=[]), - ], - serialized_start=818, - serialized_end=953, -) - -_EVENTSOURCEDINIT.fields_by_name['snapshot'].message_type = _EVENTSOURCEDSNAPSHOT -_EVENTSOURCEDSNAPSHOT.fields_by_name['snapshot'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_EVENTSOURCEDEVENT.fields_by_name['payload'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_EVENTSOURCEDREPLY.fields_by_name['client_action'].message_type = cloudstate_dot_entity__pb2._CLIENTACTION -_EVENTSOURCEDREPLY.fields_by_name['side_effects'].message_type = cloudstate_dot_entity__pb2._SIDEEFFECT -_EVENTSOURCEDREPLY.fields_by_name['events'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_EVENTSOURCEDREPLY.fields_by_name['snapshot'].message_type = google_dot_protobuf_dot_any__pb2._ANY -_EVENTSOURCEDSTREAMIN.fields_by_name['init'].message_type = _EVENTSOURCEDINIT -_EVENTSOURCEDSTREAMIN.fields_by_name['event'].message_type = _EVENTSOURCEDEVENT -_EVENTSOURCEDSTREAMIN.fields_by_name['command'].message_type = cloudstate_dot_entity__pb2._COMMAND -_EVENTSOURCEDSTREAMIN.oneofs_by_name['message'].fields.append( - _EVENTSOURCEDSTREAMIN.fields_by_name['init']) -_EVENTSOURCEDSTREAMIN.fields_by_name['init'].containing_oneof = _EVENTSOURCEDSTREAMIN.oneofs_by_name['message'] -_EVENTSOURCEDSTREAMIN.oneofs_by_name['message'].fields.append( - _EVENTSOURCEDSTREAMIN.fields_by_name['event']) -_EVENTSOURCEDSTREAMIN.fields_by_name['event'].containing_oneof = _EVENTSOURCEDSTREAMIN.oneofs_by_name['message'] 
-_EVENTSOURCEDSTREAMIN.oneofs_by_name['message'].fields.append( - _EVENTSOURCEDSTREAMIN.fields_by_name['command']) -_EVENTSOURCEDSTREAMIN.fields_by_name['command'].containing_oneof = _EVENTSOURCEDSTREAMIN.oneofs_by_name['message'] -_EVENTSOURCEDSTREAMOUT.fields_by_name['reply'].message_type = _EVENTSOURCEDREPLY -_EVENTSOURCEDSTREAMOUT.fields_by_name['failure'].message_type = cloudstate_dot_entity__pb2._FAILURE -_EVENTSOURCEDSTREAMOUT.oneofs_by_name['message'].fields.append( - _EVENTSOURCEDSTREAMOUT.fields_by_name['reply']) -_EVENTSOURCEDSTREAMOUT.fields_by_name['reply'].containing_oneof = _EVENTSOURCEDSTREAMOUT.oneofs_by_name['message'] -_EVENTSOURCEDSTREAMOUT.oneofs_by_name['message'].fields.append( - _EVENTSOURCEDSTREAMOUT.fields_by_name['failure']) -_EVENTSOURCEDSTREAMOUT.fields_by_name['failure'].containing_oneof = _EVENTSOURCEDSTREAMOUT.oneofs_by_name['message'] -DESCRIPTOR.message_types_by_name['EventSourcedInit'] = _EVENTSOURCEDINIT -DESCRIPTOR.message_types_by_name['EventSourcedSnapshot'] = _EVENTSOURCEDSNAPSHOT -DESCRIPTOR.message_types_by_name['EventSourcedEvent'] = _EVENTSOURCEDEVENT -DESCRIPTOR.message_types_by_name['EventSourcedReply'] = _EVENTSOURCEDREPLY -DESCRIPTOR.message_types_by_name['EventSourcedStreamIn'] = _EVENTSOURCEDSTREAMIN -DESCRIPTOR.message_types_by_name['EventSourcedStreamOut'] = _EVENTSOURCEDSTREAMOUT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -EventSourcedInit = _reflection.GeneratedProtocolMessageType('EventSourcedInit', (_message.Message,), { - 'DESCRIPTOR' : _EVENTSOURCEDINIT, - '__module__' : 'cloudstate.event_sourced_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.eventsourced.EventSourcedInit) - }) -_sym_db.RegisterMessage(EventSourcedInit) - -EventSourcedSnapshot = _reflection.GeneratedProtocolMessageType('EventSourcedSnapshot', (_message.Message,), { - 'DESCRIPTOR' : _EVENTSOURCEDSNAPSHOT, - '__module__' : 'cloudstate.event_sourced_pb2' - # 
@@protoc_insertion_point(class_scope:cloudstate.eventsourced.EventSourcedSnapshot) - }) -_sym_db.RegisterMessage(EventSourcedSnapshot) - -EventSourcedEvent = _reflection.GeneratedProtocolMessageType('EventSourcedEvent', (_message.Message,), { - 'DESCRIPTOR' : _EVENTSOURCEDEVENT, - '__module__' : 'cloudstate.event_sourced_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.eventsourced.EventSourcedEvent) - }) -_sym_db.RegisterMessage(EventSourcedEvent) - -EventSourcedReply = _reflection.GeneratedProtocolMessageType('EventSourcedReply', (_message.Message,), { - 'DESCRIPTOR' : _EVENTSOURCEDREPLY, - '__module__' : 'cloudstate.event_sourced_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.eventsourced.EventSourcedReply) - }) -_sym_db.RegisterMessage(EventSourcedReply) - -EventSourcedStreamIn = _reflection.GeneratedProtocolMessageType('EventSourcedStreamIn', (_message.Message,), { - 'DESCRIPTOR' : _EVENTSOURCEDSTREAMIN, - '__module__' : 'cloudstate.event_sourced_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.eventsourced.EventSourcedStreamIn) - }) -_sym_db.RegisterMessage(EventSourcedStreamIn) - -EventSourcedStreamOut = _reflection.GeneratedProtocolMessageType('EventSourcedStreamOut', (_message.Message,), { - 'DESCRIPTOR' : _EVENTSOURCEDSTREAMOUT, - '__module__' : 'cloudstate.event_sourced_pb2' - # @@protoc_insertion_point(class_scope:cloudstate.eventsourced.EventSourcedStreamOut) - }) -_sym_db.RegisterMessage(EventSourcedStreamOut) - - -DESCRIPTOR._options = None - -_EVENTSOURCED = _descriptor.ServiceDescriptor( - name='EventSourced', - full_name='cloudstate.eventsourced.EventSourced', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=955, - serialized_end=1080, - methods=[ - _descriptor.MethodDescriptor( - name='handle', - full_name='cloudstate.eventsourced.EventSourced.handle', - index=0, - containing_service=None, - input_type=_EVENTSOURCEDSTREAMIN, - output_type=_EVENTSOURCEDSTREAMOUT, - serialized_options=None, - ), 
-]) -_sym_db.RegisterServiceDescriptor(_EVENTSOURCED) - -DESCRIPTOR.services_by_name['EventSourced'] = _EVENTSOURCED - -# @@protoc_insertion_point(module_scope) diff --git a/cloudstate/event_sourced_pb2_grpc.py b/cloudstate/event_sourced_pb2_grpc.py deleted file mode 100644 index 5564da1..0000000 --- a/cloudstate/event_sourced_pb2_grpc.py +++ /dev/null @@ -1,55 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from cloudstate import event_sourced_pb2 as cloudstate_dot_event__sourced__pb2 - - -class EventSourcedStub(object): - """The Entity service - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.handle = channel.stream_stream( - '/cloudstate.eventsourced.EventSourced/handle', - request_serializer=cloudstate_dot_event__sourced__pb2.EventSourcedStreamIn.SerializeToString, - response_deserializer=cloudstate_dot_event__sourced__pb2.EventSourcedStreamOut.FromString, - ) - - -class EventSourcedServicer(object): - """The Entity service - """ - - def handle(self, request_iterator, context): - """The stream. One stream will be established per active entity. - Once established, the first message sent will be Init, which contains the entity ID, and, - if the entity has previously persisted a snapshot, it will contain that snapshot. It will - then send zero to many event messages, one for each event previously persisted. The entity - is expected to apply these to its state in a deterministic fashion. Once all the events - are sent, one to many commands are sent, with new commands being sent as new requests for - the entity come in. The entity is expected to reply to each command with exactly one reply - message. The entity should reply in order, and any events that the entity requests to be - persisted the entity should handle itself, applying them to its own state, as if they had - arrived as events when the event stream was being replayed on load. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_EventSourcedServicer_to_server(servicer, server): - rpc_method_handlers = { - 'handle': grpc.stream_stream_rpc_method_handler( - servicer.handle, - request_deserializer=cloudstate_dot_event__sourced__pb2.EventSourcedStreamIn.FromString, - response_serializer=cloudstate_dot_event__sourced__pb2.EventSourcedStreamOut.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'cloudstate.eventsourced.EventSourced', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/cloudstate/tests/test_cloudstate.py b/cloudstate/tests/test_cloudstate.py deleted file mode 100644 index 9630c91..0000000 --- a/cloudstate/tests/test_cloudstate.py +++ /dev/null @@ -1,12 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - - -def inc(x): - return x + 1 - - -def test_answer(): - assert inc(3) == 4 diff --git a/cloudstate/version.py b/cloudstate/version.py deleted file mode 100644 index 12afd0f..0000000 --- a/cloudstate/version.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -__version__ = "0.1.1" diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 958296d..0000000 --- a/docs/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# Cloudstate Python documentation - -Documentation source for Cloudstate Python, published to https://cloudstate.io/docs/python/current/ - -To build the docs with [sbt](https://www.scala-sbt.org): - -``` -sbt paradox -``` - -Can also first start the sbt interactive shell with `sbt`, then run commands. 
- -The documentation can be viewed locally by opening the generated pages: - -``` -open target/paradox/site/main/index.html -``` - -To watch files for changes and rebuild docs automatically: - -``` -sbt ~paradox -``` diff --git a/docs/build.sbt b/docs/build.sbt deleted file mode 100644 index 9b40a52..0000000 --- a/docs/build.sbt +++ /dev/null @@ -1,10 +0,0 @@ -lazy val docs = project - .in(file(".")) - .enablePlugins(CloudstateParadoxPlugin) - .settings( - deployModule := "python", - paradoxProperties in Compile ++= Map( - "cloudstate.python.version" -> { if (isSnapshot.value) previousStableVersion.value.getOrElse("0.0.0") else version.value }, - "extref.cloudstate.base_url" -> "https://cloudstate.io/docs/core/current/%s" - ) - ) diff --git a/docs/project/build.properties b/docs/project/build.properties deleted file mode 100644 index 654fe70..0000000 --- a/docs/project/build.properties +++ /dev/null @@ -1 +0,0 @@ -sbt.version=1.3.12 diff --git a/docs/project/plugins.sbt b/docs/project/plugins.sbt deleted file mode 100644 index 2afe97a..0000000 --- a/docs/project/plugins.sbt +++ /dev/null @@ -1,2 +0,0 @@ -addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.0.0") -addSbtPlugin("io.cloudstate" % "sbt-cloudstate-paradox" % "0.1.2") diff --git a/docs/src/main/paradox/gettingstarted.md b/docs/src/main/paradox/gettingstarted.md deleted file mode 100644 index 58863c1..0000000 --- a/docs/src/main/paradox/gettingstarted.md +++ /dev/null @@ -1,11 +0,0 @@ -# Getting started - -Install current version: - -@@@vars -``` -pip install cloudstate==$cloudstate.python.version$ -``` -@@@ - -Link to @extref:[event sourcing](cloudstate:user/features/eventsourced.html) diff --git a/docs/src/main/paradox/index.md b/docs/src/main/paradox/index.md deleted file mode 100644 index b2f9f2f..0000000 --- a/docs/src/main/paradox/index.md +++ /dev/null @@ -1,7 +0,0 @@ -# Cloudstate Python - -Link to @extref:[core docs](cloudstate:index.html) - -@@@ index -* [Getting started](gettingstarted.md) -@@@ diff 
--git a/protobuf/example/shoppingcart/persistence/domain.proto b/protobuf/example/shoppingcart/persistence/domain.proto deleted file mode 100644 index e92a791..0000000 --- a/protobuf/example/shoppingcart/persistence/domain.proto +++ /dev/null @@ -1,25 +0,0 @@ -// These are the messages that get persisted - the events, plus the current state (Cart) for snapshots. -syntax = "proto3"; - -package com.example.shoppingcart.persistence; - -message LineItem { - string productId = 1; - string name = 2; - int32 quantity = 3; -} - -// The item added event. -message ItemAdded { - LineItem item = 1; -} - -// The item removed event. -message ItemRemoved { - string productId = 1; -} - -// The shopping cart state. -message Cart { - repeated LineItem items = 1; -} diff --git a/protobuf/example/shoppingcart/shoppingcart.proto b/protobuf/example/shoppingcart/shoppingcart.proto deleted file mode 100644 index a944bc5..0000000 --- a/protobuf/example/shoppingcart/shoppingcart.proto +++ /dev/null @@ -1,58 +0,0 @@ -// This is the public API offered by the shopping cart entity. 
-syntax = "proto3"; - -import "google/protobuf/empty.proto"; -import "cloudstate/entity_key.proto"; -import "google/api/annotations.proto"; -import "google/api/http.proto"; - -package com.example.shoppingcart; - -message AddLineItem { - string user_id = 1 [(.cloudstate.entity_key) = true]; - string product_id = 2; - string name = 3; - int32 quantity = 4; -} - -message RemoveLineItem { - string user_id = 1 [(.cloudstate.entity_key) = true]; - string product_id = 2; -} - -message GetShoppingCart { - string user_id = 1 [(.cloudstate.entity_key) = true]; -} - -message LineItem { - string product_id = 1; - string name = 2; - int32 quantity = 3; -} - -message Cart { - repeated LineItem items = 1; -} - -service ShoppingCart { - rpc AddItem(AddLineItem) returns (google.protobuf.Empty) { - option (google.api.http) = { - post: "/cart/{user_id}/items/add", - body: "*", - }; - } - - rpc RemoveItem(RemoveLineItem) returns (google.protobuf.Empty) { - option (google.api.http).post = "/cart/{user_id}/items/{product_id}/remove"; - } - - rpc GetCart(GetShoppingCart) returns (Cart) { - option (google.api.http) = { - get: "/carts/{user_id}", - additional_bindings: { - get: "/carts/{user_id}/items", - response_body: "items" - } - }; - } -} diff --git a/protobuf/frontend/cloudstate/entity_key.proto b/protobuf/frontend/cloudstate/entity_key.proto deleted file mode 100644 index 9cd044a..0000000 --- a/protobuf/frontend/cloudstate/entity_key.proto +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2019 Lightbend Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// Extension for specifying which field in a message is to be considered an -// entity key, for the purposes associating gRPC calls with entities and -// sharding. - -syntax = "proto3"; - -import "google/protobuf/descriptor.proto"; - -package cloudstate; - -option java_package = "io.cloudstate"; - -extend google.protobuf.FieldOptions { - bool entity_key = 50002; -} diff --git a/protobuf/frontend/google/api/annotations.proto b/protobuf/frontend/google/api/annotations.proto deleted file mode 100644 index 85c361b..0000000 --- a/protobuf/frontend/google/api/annotations.proto +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) 2015, Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.api; - -import "google/api/http.proto"; -import "google/protobuf/descriptor.proto"; - -option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; -option java_multiple_files = true; -option java_outer_classname = "AnnotationsProto"; -option java_package = "com.google.api"; -option objc_class_prefix = "GAPI"; - -extend google.protobuf.MethodOptions { - // See `HttpRule`. 
- HttpRule http = 72295728; -} diff --git a/protobuf/frontend/google/api/http.proto b/protobuf/frontend/google/api/http.proto deleted file mode 100644 index b2977f5..0000000 --- a/protobuf/frontend/google/api/http.proto +++ /dev/null @@ -1,376 +0,0 @@ -// Copyright 2019 Google LLC. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// - -syntax = "proto3"; - -package google.api; - -option cc_enable_arenas = true; -option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; -option java_multiple_files = true; -option java_outer_classname = "HttpProto"; -option java_package = "com.google.api"; -option objc_class_prefix = "GAPI"; - -// Defines the HTTP configuration for an API service. It contains a list of -// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method -// to one or more HTTP REST API methods. -message Http { - // A list of HTTP configuration rules that apply to individual API methods. - // - // **NOTE:** All service configuration rules follow "last one wins" order. - repeated HttpRule rules = 1; - - // When set to true, URL path parameters will be fully URI-decoded except in - // cases of single segment matches in reserved expansion, where "%2F" will be - // left encoded. - // - // The default behavior is to not decode RFC 6570 reserved characters in multi - // segment matches. 
- bool fully_decode_reserved_expansion = 2; -} - -// # gRPC Transcoding -// -// gRPC Transcoding is a feature for mapping between a gRPC method and one or -// more HTTP REST endpoints. It allows developers to build a single API service -// that supports both gRPC APIs and REST APIs. Many systems, including [Google -// APIs](https://github.com/googleapis/googleapis), -// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC -// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), -// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature -// and use it for large scale production services. -// -// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies -// how different portions of the gRPC request message are mapped to the URL -// path, URL query parameters, and HTTP request body. It also controls how the -// gRPC response message is mapped to the HTTP response body. `HttpRule` is -// typically specified as an `google.api.http` annotation on the gRPC method. -// -// Each mapping specifies a URL path template and an HTTP method. The path -// template may refer to one or more fields in the gRPC request message, as long -// as each field is a non-repeated field with a primitive (non-message) type. -// The path template controls how fields of the request message are mapped to -// the URL path. -// -// Example: -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http) = { -// get: "/v1/{name=messages/*}" -// }; -// } -// } -// message GetMessageRequest { -// string name = 1; // Mapped to URL path. -// } -// message Message { -// string text = 1; // The resource content. 
-// } -// -// This enables an HTTP REST to gRPC mapping as below: -// -// HTTP | gRPC -// -----|----- -// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` -// -// Any fields in the request message which are not bound by the path template -// automatically become HTTP query parameters if there is no HTTP request body. -// For example: -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http) = { -// get:"/v1/messages/{message_id}" -// }; -// } -// } -// message GetMessageRequest { -// message SubMessage { -// string subfield = 1; -// } -// string message_id = 1; // Mapped to URL path. -// int64 revision = 2; // Mapped to URL query parameter `revision`. -// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. -// } -// -// This enables a HTTP JSON to RPC mapping as below: -// -// HTTP | gRPC -// -----|----- -// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | -// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: -// "foo"))` -// -// Note that fields which are mapped to URL query parameters must have a -// primitive type or a repeated primitive type or a non-repeated message type. -// In the case of a repeated type, the parameter can be repeated in the URL -// as `...?param=A¶m=B`. In the case of a message type, each field of the -// message is mapped to a separate parameter, such as -// `...?foo.a=A&foo.b=B&foo.c=C`. -// -// For HTTP methods that allow a request body, the `body` field -// specifies the mapping. 
Consider a REST update method on the -// message resource collection: -// -// service Messaging { -// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { -// option (google.api.http) = { -// patch: "/v1/messages/{message_id}" -// body: "message" -// }; -// } -// } -// message UpdateMessageRequest { -// string message_id = 1; // mapped to the URL -// Message message = 2; // mapped to the body -// } -// -// The following HTTP JSON to RPC mapping is enabled, where the -// representation of the JSON in the request body is determined by -// protos JSON encoding: -// -// HTTP | gRPC -// -----|----- -// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: -// "123456" message { text: "Hi!" })` -// -// The special name `*` can be used in the body mapping to define that -// every field not bound by the path template should be mapped to the -// request body. This enables the following alternative definition of -// the update method: -// -// service Messaging { -// rpc UpdateMessage(Message) returns (Message) { -// option (google.api.http) = { -// patch: "/v1/messages/{message_id}" -// body: "*" -// }; -// } -// } -// message Message { -// string message_id = 1; -// string text = 2; -// } -// -// -// The following HTTP JSON to RPC mapping is enabled: -// -// HTTP | gRPC -// -----|----- -// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: -// "123456" text: "Hi!")` -// -// Note that when using `*` in the body mapping, it is not possible to -// have HTTP parameters, as all fields not bound by the path end in -// the body. This makes this option more rarely used in practice when -// defining REST APIs. The common usage of `*` is in custom methods -// which don't use the URL at all for transferring data. -// -// It is possible to define multiple HTTP methods for one RPC by using -// the `additional_bindings` option. 
Example: -// -// service Messaging { -// rpc GetMessage(GetMessageRequest) returns (Message) { -// option (google.api.http) = { -// get: "/v1/messages/{message_id}" -// additional_bindings { -// get: "/v1/users/{user_id}/messages/{message_id}" -// } -// }; -// } -// } -// message GetMessageRequest { -// string message_id = 1; -// string user_id = 2; -// } -// -// This enables the following two alternative HTTP JSON to RPC mappings: -// -// HTTP | gRPC -// -----|----- -// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` -// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: -// "123456")` -// -// ## Rules for HTTP mapping -// -// 1. Leaf request fields (recursive expansion nested messages in the request -// message) are classified into three categories: -// - Fields referred by the path template. They are passed via the URL path. -// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP -// request body. -// - All other fields are passed via the URL query parameters, and the -// parameter name is the field path in the request message. A repeated -// field can be represented as multiple query parameters under the same -// name. -// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields -// are passed via URL path and HTTP request body. -// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all -// fields are passed via URL path and URL query parameters. -// -// ### Path template syntax -// -// Template = "/" Segments [ Verb ] ; -// Segments = Segment { "/" Segment } ; -// Segment = "*" | "**" | LITERAL | Variable ; -// Variable = "{" FieldPath [ "=" Segments ] "}" ; -// FieldPath = IDENT { "." IDENT } ; -// Verb = ":" LITERAL ; -// -// The syntax `*` matches a single URL path segment. 
The syntax `**` matches -// zero or more URL path segments, which must be the last part of the URL path -// except the `Verb`. -// -// The syntax `Variable` matches part of the URL path as specified by its -// template. A variable template must not contain other variables. If a variable -// matches a single path segment, its template may be omitted, e.g. `{var}` -// is equivalent to `{var=*}`. -// -// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` -// contains any reserved character, such characters should be percent-encoded -// before the matching. -// -// If a variable contains exactly one path segment, such as `"{var}"` or -// `"{var=*}"`, when such a variable is expanded into a URL path on the client -// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The -// server side does the reverse decoding. Such variables show up in the -// [Discovery -// Document](https://developers.google.com/discovery/v1/reference/apis) as -// `{var}`. -// -// If a variable contains multiple path segments, such as `"{var=foo/*}"` -// or `"{var=**}"`, when such a variable is expanded into a URL path on the -// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. -// The server side does the reverse decoding, except "%2F" and "%2f" are left -// unchanged. Such variables show up in the -// [Discovery -// Document](https://developers.google.com/discovery/v1/reference/apis) as -// `{+var}`. -// -// ## Using gRPC API Service Configuration -// -// gRPC API Service Configuration (service config) is a configuration language -// for configuring a gRPC service to become a user-facing product. The -// service config is simply the YAML representation of the `google.api.Service` -// proto message. -// -// As an alternative to annotating your proto file, you can configure gRPC -// transcoding in your service config YAML files. 
You do this by specifying a -// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same -// effect as the proto annotation. This can be particularly useful if you -// have a proto that is reused in multiple services. Note that any transcoding -// specified in the service config will override any matching transcoding -// configuration in the proto. -// -// Example: -// -// http: -// rules: -// # Selects a gRPC method and applies HttpRule to it. -// - selector: example.v1.Messaging.GetMessage -// get: /v1/messages/{message_id}/{sub.subfield} -// -// ## Special notes -// -// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the -// proto to JSON conversion must follow the [proto3 -// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). -// -// While the single segment variable follows the semantics of -// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String -// Expansion, the multi segment variable **does not** follow RFC 6570 Section -// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion -// does not expand special characters like `?` and `#`, which would lead -// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding -// for multi segment variables. -// -// The path variables **must not** refer to any repeated or mapped field, -// because client libraries are not capable of handling such variable expansion. -// -// The path variables **must not** capture the leading "/" character. The reason -// is that the most common use case "{var}" does not capture the leading "/" -// character. For consistency, all path variables must share the same behavior. -// -// Repeated message fields must not be mapped to URL query parameters, because -// no client library can support such complicated mapping. -// -// If an API needs to use a JSON array for request or response body, it can map -// the request or response body to a repeated field. 
However, some gRPC -// Transcoding implementations may not support this feature. -message HttpRule { - // Selects a method to which this rule applies. - // - // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. - string selector = 1; - - // Determines the URL pattern is matched by this rules. This pattern can be - // used with any of the {get|put|post|delete|patch} methods. A custom method - // can be defined using the 'custom' field. - oneof pattern { - // Maps to HTTP GET. Used for listing and getting information about - // resources. - string get = 2; - - // Maps to HTTP PUT. Used for replacing a resource. - string put = 3; - - // Maps to HTTP POST. Used for creating a resource or performing an action. - string post = 4; - - // Maps to HTTP DELETE. Used for deleting a resource. - string delete = 5; - - // Maps to HTTP PATCH. Used for updating a resource. - string patch = 6; - - // The custom pattern is used for specifying an HTTP method that is not - // included in the `pattern` field, such as HEAD, or "*" to leave the - // HTTP method unspecified for this rule. The wild-card rule is useful - // for services that provide content to Web (HTML) clients. - CustomHttpPattern custom = 8; - } - - // The name of the request field whose value is mapped to the HTTP request - // body, or `*` for mapping all request fields not captured by the path - // pattern to the HTTP body, or omitted for not having any HTTP request body. - // - // NOTE: the referred field must be present at the top-level of the request - // message type. - string body = 7; - - // Optional. The name of the response field whose value is mapped to the HTTP - // response body. When omitted, the entire response message will be used - // as the HTTP response body. - // - // NOTE: The referred field must be present at the top-level of the response - // message type. - string response_body = 12; - - // Additional HTTP bindings for the selector. 
Nested bindings must - // not contain an `additional_bindings` field themselves (that is, - // the nesting may only be one level deep). - repeated HttpRule additional_bindings = 11; -} - -// A custom pattern is used for defining custom HTTP verb. -message CustomHttpPattern { - // The name of this custom HTTP verb. - string kind = 1; - - // The path matched by this custom verb. - string path = 2; -} diff --git a/protobuf/protocol/cloudstate/crdt.proto b/protobuf/protocol/cloudstate/crdt.proto deleted file mode 100644 index 2c2bf35..0000000 --- a/protobuf/protocol/cloudstate/crdt.proto +++ /dev/null @@ -1,379 +0,0 @@ -// Copyright 2019 Lightbend Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// gRPC interface for Event Sourced Entity user functions. - -syntax = "proto3"; - -package cloudstate.crdt; - -// Any is used so that domain events defined according to the functions business domain can be embedded inside -// the protocol. -import "google/protobuf/any.proto"; -import "cloudstate/entity.proto"; - -option java_package = "io.cloudstate.protocol"; - - -// CRDT Protocol -// -// Note that while this protocol provides support for CRDTs, the data types sent across the protocol are not CRDTs -// themselves. It is the responsibility of the CloudState proxy to implement the CRDTs, merge functions, vector clocks -// etc, not the user function. 
The user function need only hold the current value in memory, and this protocol sends -// deltas to the user function to update its in memory value as necessary. These deltas have no way of dealing with -// conflicts, hence it important that the CloudState proxy always know what the state of the user functions in memory -// value is before sending a delta. If the CloudState proxy is not sure what the value is, eg because it has just sent -// an operation to the user function may have updated its value as a result, the proxy should wait until it gets the -// result of the operation back, to ensure its in memory value is in sync with the user function so that it can -// calculate deltas that won't conflict. -// -// The user function is expected to update its value both as the result of receiving deltas from the proxy, as well as -// when it sends deltas. It must not update its value in any other circumstance, updating the value in response to any -// other stimuli risks the value becoming out of sync with the CloudState proxy. The user function will not be sent -// back deltas as a result of its own changes. -// -// An invocation of handle is made for each entity being handled. It may be kept alive and used to handle multiple -// commands, and may subsequently be terminated if that entity becomes idle, or if the entity is deleted. Shutdown is -// typically done for efficiency reasons, unless the entity is explicitly deleted, a terminated handle stream does not -// mean the proxy has stopped tracking the state of the entity in its memory. -// -// Special care must be taken when working with maps and sets. The keys/values are google.protobuf.Any, which encodes -// the value as binary protobuf, however, serialized protobufs are not stable, two semantically equal objects could -// encode to different bytes. It is the responsibility of the user function to ensure that stable encodings are used. 
-service Crdt { - - // After invoking handle, the first message sent will always be a CrdtInit message, containing the entity ID, and, - // if it exists or is available, the current state of the entity. After that, one or more commands may be sent, - // as well as deltas as they arrive, and the entire state if either the entity is created, or the proxy wishes the - // user function to replace its entire state. - // - // The user function must respond with one reply per command in. They do not necessarily have to be sent in the same - // order that the commands were sent, the command ID is used to correlate commands to replies. - rpc handle(stream CrdtStreamIn) returns (stream CrdtStreamOut); -} - -// Message for the Crdt handle stream in. -message CrdtStreamIn { - oneof message { - - // Always sent first, and only once. - CrdtInit init = 1; - - // Sent to indicate the user function should replace its current state with this state. If the user function - // does not have a current state, either because the init function didn't send one and the user function hasn't - // updated the state itself in response to a command, or because the state was deleted, this must be sent before - // any deltas. - CrdtState state = 2; - - // A delta to be applied to the current state. May be sent at any time as long as the user function already has - // state. - CrdtDelta changed = 3; - - // Delete the entity. May be sent at any time. The user function should clear its state when it receives this. - // A proxy may decide to terminate the stream after sending this. - CrdtDelete deleted = 4; - - // A command, may be sent at any time. - Command command = 5; - - // A stream has been cancelled. - StreamCancelled stream_cancelled = 6; - } -} - -// Message for the Crdt handle stream out. -message CrdtStreamOut { - oneof message { - // A reply to an incoming command. Either one reply, or one failure, must be sent in response to each command. - CrdtReply reply = 1; - // A streamed message. 
- CrdtStreamedMessage streamed_message = 2; - // A stream cancelled response, may be sent in response to stream_cancelled. - CrdtStreamCancelledResponse stream_cancelled_response = 3; - // A failure. Either sent in response to a command, or sent if some other error occurs. - Failure failure = 4; - } -} - -// The CRDT state. This represents the full state of a CRDT. When received, a user function should replace the current -// state with this, not apply it as a delta. This includes both for the top level CRDT, and embedded CRDTs, such as -// the values of an ORMap. -message CrdtState { - oneof state { - // A Grow-only Counter - GCounterState gcounter = 1; - - // A Positve-Negative Counter - PNCounterState pncounter = 2; - - // A Grow-only Set - GSetState gset = 3; - - // An Observed-Removed Set - ORSetState orset = 4; - - // A Last-Write-Wins Register - LWWRegisterState lwwregister = 5; - - // A Flag - FlagState flag = 6; - - // An Observed-Removed Map - ORMapState ormap = 7; - - // A vote - VoteState vote = 8; - } -} - -// A Grow-only counter -// -// A G-Counter can only be incremented, it can't be decremented. -message GCounterState { - - // The current value of the counter. - uint64 value = 1; -} - -// A Positve-Negative Counter -// -// A PN-Counter can be both incremented and decremented. -message PNCounterState { - - // The current value of the counter. - int64 value = 1; -} - -// A Grow-only Set -// -// A G-Set can only have items added, items cannot be removed. -message GSetState { - - // The current items in the set. - repeated google.protobuf.Any items = 1; -} - -// An Observed-Removed Set -// -// An OR-Set may have items added and removed, with the condition that an item must be observed to be in the set before -// it is removed. -message ORSetState { - - // The current items in the set. 
- repeated google.protobuf.Any items = 1; -} - -// A Last-Write-Wins Register -// -// A LWW-Register holds a single value, with the current value being selected based on when it was last written. -// The time of the last write may either be determined using the proxies clock, or may be based on a custom, domain -// specific value. -message LWWRegisterState { - - // The current value of the register. - google.protobuf.Any value = 1; - - // The clock to use if this state needs to be merged with another one. - CrdtClock clock = 2; - - // The clock value if the clock in use is a custom clock. - int64 custom_clock_value = 3; -} - -// A Flag -// -// A Flag is a boolean value, that once set to true, stays true. -message FlagState { - - // The current value of the flag. - bool value = 1; -} - -// An Observed-Removed Map -// -// Like an OR-Set, an OR-Map may have items added and removed, with the condition that an item must be observed to be -// in the map before it is removed. The values of the map are CRDTs themselves. Different keys are allowed to use -// different CRDTs, and if an item is removed, and then replaced, the new value may be a different CRDT. -message ORMapState { - - // The entries of the map. - repeated ORMapEntry entries = 1; -} - -// An OR-Map entry. -message ORMapEntry { - - // The entry key. - google.protobuf.Any key = 1; - - // The value of the entry, a CRDT itself. - CrdtState value = 2; -} - -// A Vote. This allows nodes to vote on something. 
-message VoteState { - - // The number of votes for - uint32 votes_for = 1; - - // The total number of voters - uint32 total_voters = 2; - - // The vote of the current node, which is included in the above two numbers - bool self_vote = 3; -} - -// A CRDT delta -// -// Deltas only carry the change in value, not the full value (unless -message CrdtDelta { - oneof delta { - GCounterDelta gcounter = 1; - PNCounterDelta pncounter = 2; - GSetDelta gset = 3; - ORSetDelta orset = 4; - LWWRegisterDelta lwwregister = 5; - FlagDelta flag = 6; - ORMapDelta ormap = 7; - VoteDelta vote = 8; - } -} - -message GCounterDelta { - uint64 increment = 1; -} - -message PNCounterDelta { - sint64 change = 1; -} - -message GSetDelta { - repeated google.protobuf.Any added = 1; -} - -message ORSetDelta { - // If cleared is set, the set must be cleared before added is processed. - bool cleared = 1; - repeated google.protobuf.Any removed = 2; - repeated google.protobuf.Any added = 3; -} - -message LWWRegisterDelta { - google.protobuf.Any value = 1; - CrdtClock clock = 2; - int64 custom_clock_value = 3; -} - -message FlagDelta { - bool value = 1; -} - -message ORMapDelta { - bool cleared = 1; - repeated google.protobuf.Any removed = 2; - repeated ORMapEntryDelta updated = 3; - repeated ORMapEntry added = 4; -} - -message ORMapEntryDelta { - // The entry key. - google.protobuf.Any key = 1; - - CrdtDelta delta = 2; -} - -message VoteDelta { - // Only set by the user function to change the nodes current vote. - bool self_vote = 1; - - // Only set by the proxy to change the votes for and total voters. 
- int32 votes_for = 2; - int32 total_voters = 3; -} - -message CrdtInit { - string service_name = 1; - string entity_id = 2; - CrdtState state = 3; -} - -message CrdtDelete { -} - -message CrdtReply { - - int64 command_id = 1; - - ClientAction client_action = 2; - - repeated SideEffect side_effects = 4; - - CrdtStateAction state_action = 5; - - // If the request was streamed, setting this to true indicates that the command should - // be handled as a stream. Subsequently, the user function may send CrdtStreamedMessage, - // and a CrdtStreamCancelled message will be sent if the stream is cancelled (though - // not if the a CrdtStreamedMessage ends the stream first). - bool streamed = 6; -} - -message CrdtStateAction { - oneof action { - CrdtState create = 5; - CrdtDelta update = 6; - CrdtDelete delete = 7; - } - - CrdtWriteConsistency write_consistency = 8; -} - -// May be sent as often as liked if the first reply set streamed to true -message CrdtStreamedMessage { - - int64 command_id = 1; - - ClientAction client_action = 2; - - repeated SideEffect side_effects = 3; - - // Indicates the stream should end, no messages may be sent for this command after this. - bool end_stream = 4; -} - -message CrdtStreamCancelledResponse { - int64 command_id = 1; - - repeated SideEffect side_effects = 2; - - CrdtStateAction state_action = 3; -} - -enum CrdtWriteConsistency { - LOCAL = 0; - MAJORITY = 1; - ALL = 2; -} - -enum CrdtClock { - // Use the default clock for deciding the last write, which is the system clocks - // milliseconds since epoch. - DEFAULT = 0; - // Use the reverse semantics with the default clock, to enable first write wins. - REVERSE = 1; - // Use a custom clock value, set using custom_clock_value. - CUSTOM = 2; - // Use a custom clock value, but automatically increment it by one if the clock - // value from the current value is equal to the custom_clock_value. 
- CUSTOM_AUTO_INCREMENT = 3; -} diff --git a/protobuf/protocol/cloudstate/entity.proto b/protobuf/protocol/cloudstate/entity.proto deleted file mode 100644 index fa9a329..0000000 --- a/protobuf/protocol/cloudstate/entity.proto +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright 2019 Lightbend Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// gRPC interface for Event Sourced Entity user functions. - -syntax = "proto3"; - -package cloudstate; - -// Any is used so that domain events defined according to the functions business domain can be embedded inside -// the protocol. -import "google/protobuf/any.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/descriptor.proto"; - -option java_package = "io.cloudstate.protocol"; - -// A reply to the sender. -message Reply { - // The reply payload - google.protobuf.Any payload = 1; -} - -// Forwards handling of this request to another entity. -message Forward { - // The name of the service to forward to. - string service_name = 1; - // The name of the command. - string command_name = 2; - // The payload. - google.protobuf.Any payload = 3; -} - -// An action for the client -message ClientAction { - oneof action { - - // Send a reply - Reply reply = 1; - - // Forward to another entity - Forward forward = 2; - - // Send a failure to the client - Failure failure = 3; - } -} - -// A side effect to be done after this command is handled. 
-message SideEffect { - - // The name of the service to perform the side effect on. - string service_name = 1; - - // The name of the command. - string command_name = 2; - - // The payload of the command. - google.protobuf.Any payload = 3; - - // Whether this side effect should be performed synchronously, ie, before the reply is eventually - // sent, or not. - bool synchronous = 4; -} - -// A command. For each command received, a reply must be sent with a matching command id. -message Command { - - // The ID of the entity. - string entity_id = 1; - - // A command id. - int64 id = 2; - - // Command name - string name = 3; - - // The command payload. - google.protobuf.Any payload = 4; - - // Whether the command is streamed or not - bool streamed = 5; -} - -message StreamCancelled { - - // The ID of the entity - string entity_id = 1; - - // The command id - int64 id = 2; -} - -// A failure reply. If this is returned, it will be translated into a gRPC unknown -// error with the corresponding description if supplied. -message Failure { - - // The id of the command being replied to. Must match the input command. - int64 command_id = 1; - - // A description of the error. - string description = 2; -} - -message EntitySpec { - // This should be the Descriptors.FileDescriptorSet in proto serialized from as generated by: - // protoc --include_imports \ - // --proto_path= \ - // --descriptor_set_out=user-function.desc \ - // - bytes proto = 1; - - // The entities being served. - repeated Entity entities = 2; - - // Optional information about the service. - ServiceInfo service_info = 3; -} - -// Information about the service that proxy is proxying to. -// All of the information in here is optional. It may be useful for debug purposes. -message ServiceInfo { - - // The name of the service, eg, "shopping-cart". - string service_name = 1; - - // The version of the service. - string service_version = 2; - - // A description of the runtime for the service. 
Can be anything, but examples might be: - // - node v10.15.2 - // - OpenJDK Runtime Environment 1.8.0_192-b12 - string service_runtime = 3; - - // If using a support library, the name of that library, eg "cloudstate" - string support_library_name = 4; - - // The version of the support library being used. - string support_library_version = 5; -} - -message Entity { - - // The type of entity. By convention, this should be a fully qualified entity protocol grpc - // service name, for example, cloudstate.eventsourced.EventSourced. - string entity_type = 1; - - // The name of the service to load from the protobuf file. - string service_name = 2; - - // The ID to namespace state by. How this is used depends on the type of entity, for example, - // event sourced entities will prefix this to the persistence id. - string persistence_id = 3; -} - -message UserFunctionError { - string message = 1; -} - -message ProxyInfo { - int32 protocol_major_version = 1; - int32 protocol_minor_version = 2; - string proxy_name = 3; - string proxy_version = 4; - repeated string supported_entity_types = 5; -} - -// Entity discovery service. -service EntityDiscovery { - - // Discover what entities the user function wishes to serve. - rpc discover(ProxyInfo) returns (EntitySpec) {} - - // Report an error back to the user function. This will only be invoked to tell the user function - // that it has done something wrong, eg, violated the protocol, tried to use an entity type that - // isn't supported, or attempted to forward to an entity that doesn't exist, etc. These messages - // should be logged clearly for debugging purposes. - rpc reportError(UserFunctionError) returns (google.protobuf.Empty) {} -} diff --git a/protobuf/protocol/cloudstate/event_sourced.proto b/protobuf/protocol/cloudstate/event_sourced.proto deleted file mode 100644 index 2259ea0..0000000 --- a/protobuf/protocol/cloudstate/event_sourced.proto +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright 2019 Lightbend Inc. 
-// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// gRPC interface for Event Sourced Entity user functions. - -syntax = "proto3"; - -package cloudstate.eventsourced; - -// Any is used so that domain events defined according to the functions business domain can be embedded inside -// the protocol. -import "google/protobuf/any.proto"; -import "cloudstate/entity.proto"; - -option java_package = "io.cloudstate.protocol"; - -// The init message. This will always be the first message sent to the entity when -// it is loaded. -message EventSourcedInit { - - string service_name = 1; - - // The ID of the entity. - string entity_id = 2; - - // If present the entity should initialise its state using this snapshot. - EventSourcedSnapshot snapshot = 3; -} - -// A snapshot -message EventSourcedSnapshot { - - // The sequence number when the snapshot was taken. - int64 snapshot_sequence = 1; - - // The snapshot. - google.protobuf.Any snapshot = 2; -} - -// An event. These will be sent to the entity when the entity starts up. -message EventSourcedEvent { - - // The sequence number of the event. - int64 sequence = 1; - - // The event payload. - google.protobuf.Any payload = 2; -} - -// A reply to a command. -message EventSourcedReply { - - // The id of the command being replied to. Must match the input command. 
- int64 command_id = 1; - - // The action to take - ClientAction client_action = 2; - - // Any side effects to perform - repeated SideEffect side_effects = 3; - - // A list of events to persist - these will be persisted before the reply - // is sent. - repeated google.protobuf.Any events = 4; - - // An optional snapshot to persist. It is assumed that this snapshot will have - // the state of any events in the events field applied to it. It is illegal to - // send a snapshot without sending any events. - google.protobuf.Any snapshot = 5; -} - -// Input message type for the gRPC stream in. -message EventSourcedStreamIn { - oneof message { - EventSourcedInit init = 1; - EventSourcedEvent event = 2; - Command command = 3; - } -} - -// Output message type for the gRPC stream out. -message EventSourcedStreamOut { - oneof message { - EventSourcedReply reply = 1; - Failure failure = 2; - } -} - -// The Entity service -service EventSourced { - - // The stream. One stream will be established per active entity. - // Once established, the first message sent will be Init, which contains the entity ID, and, - // if the entity has previously persisted a snapshot, it will contain that snapshot. It will - // then send zero to many event messages, one for each event previously persisted. The entity - // is expected to apply these to its state in a deterministic fashion. Once all the events - // are sent, one to many commands are sent, with new commands being sent as new requests for - // the entity come in. The entity is expected to reply to each command with exactly one reply - // message. The entity should reply in order, and any events that the entity requests to be - // persisted the entity should handle itself, applying them to its own state, as if they had - // arrived as events when the event stream was being replayed on load. 
- rpc handle(stream EventSourcedStreamIn) returns (stream EventSourcedStreamOut) {} -} diff --git a/protobuf/protocol/cloudstate/function.proto b/protobuf/protocol/cloudstate/function.proto deleted file mode 100644 index dedd7a4..0000000 --- a/protobuf/protocol/cloudstate/function.proto +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2019 Lightbend Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// gRPC interface for Event Sourced Entity user functions. - -syntax = "proto3"; - -package cloudstate.function; - -// Any is used so that domain events defined according to the functions business domain can be embedded inside -// the protocol. -import "google/protobuf/any.proto"; -import "cloudstate/entity.proto"; - -option java_package = "io.cloudstate.protocol"; - -message FunctionCommand { - // The name of the service this function is on. - string service_name = 2; - - // Command name - string name = 3; - - // The command payload. 
- google.protobuf.Any payload = 4; -} - -message FunctionReply { - - oneof response { - Reply reply = 2; - Forward forward = 3; - } - - repeated SideEffect side_effects = 4; -} - -service StatelessFunction { - - rpc handleUnary(FunctionCommand) returns (FunctionReply) {} - - rpc handleStreamedIn(stream FunctionCommand) returns (FunctionReply) {} - - rpc handleStreamedOut(FunctionCommand) returns (stream FunctionReply) {} - - rpc handleStreamed(stream FunctionCommand) returns (stream FunctionReply) {} - -} diff --git a/protobuf/proxy/grpc/reflection/v1alpha/reflection.proto b/protobuf/proxy/grpc/reflection/v1alpha/reflection.proto deleted file mode 100644 index 816852f..0000000 --- a/protobuf/proxy/grpc/reflection/v1alpha/reflection.proto +++ /dev/null @@ -1,136 +0,0 @@ -// Copyright 2016 gRPC authors. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -// Service exported by server reflection - -syntax = "proto3"; - -package grpc.reflection.v1alpha; - -service ServerReflection { - // The reflection service is structured as a bidirectional stream, ensuring - // all related requests go to a single server. - rpc ServerReflectionInfo(stream ServerReflectionRequest) - returns (stream ServerReflectionResponse); -} - -// The message sent by the client when calling ServerReflectionInfo method. -message ServerReflectionRequest { - string host = 1; - // To use reflection service, the client should set one of the following - // fields in message_request. 
The server distinguishes requests by their - // defined field and then handles them using corresponding methods. - oneof message_request { - // Find a proto file by the file name. - string file_by_filename = 3; - - // Find the proto file that declares the given fully-qualified symbol name. - // This field should be a fully-qualified symbol name - // (e.g. .[.] or .). - string file_containing_symbol = 4; - - // Find the proto file which defines an extension extending the given - // message type with the given field number. - ExtensionRequest file_containing_extension = 5; - - // Finds the tag numbers used by all known extensions of the given message - // type, and appends them to ExtensionNumberResponse in an undefined order. - // Its corresponding method is best-effort: it's not guaranteed that the - // reflection service will implement this method, and it's not guaranteed - // that this method will provide all extensions. Returns - // StatusCode::UNIMPLEMENTED if it's not implemented. - // This field should be a fully-qualified type name. The format is - // . - string all_extension_numbers_of_type = 6; - - // List the full names of registered services. The content will not be - // checked. - string list_services = 7; - } -} - -// The type name and extension number sent by the client when requesting -// file_containing_extension. -message ExtensionRequest { - // Fully-qualified type name. The format should be . - string containing_type = 1; - int32 extension_number = 2; -} - -// The message sent by the server to answer ServerReflectionInfo method. -message ServerReflectionResponse { - string valid_host = 1; - ServerReflectionRequest original_request = 2; - // The server set one of the following fields accroding to the message_request - // in the request. - oneof message_response { - // This message is used to answer file_by_filename, file_containing_symbol, - // file_containing_extension requests with transitive dependencies. 
As - // the repeated label is not allowed in oneof fields, we use a - // FileDescriptorResponse message to encapsulate the repeated fields. - // The reflection service is allowed to avoid sending FileDescriptorProtos - // that were previously sent in response to earlier requests in the stream. - FileDescriptorResponse file_descriptor_response = 4; - - // This message is used to answer all_extension_numbers_of_type requst. - ExtensionNumberResponse all_extension_numbers_response = 5; - - // This message is used to answer list_services request. - ListServiceResponse list_services_response = 6; - - // This message is used when an error occurs. - ErrorResponse error_response = 7; - } -} - -// Serialized FileDescriptorProto messages sent by the server answering -// a file_by_filename, file_containing_symbol, or file_containing_extension -// request. -message FileDescriptorResponse { - // Serialized FileDescriptorProto messages. We avoid taking a dependency on - // descriptor.proto, which uses proto2 only features, by making them opaque - // bytes instead. - repeated bytes file_descriptor_proto = 1; -} - -// A list of extension numbers sent by the server answering -// all_extension_numbers_of_type request. -message ExtensionNumberResponse { - // Full name of the base type, including the package name. The format - // is . - string base_type_name = 1; - repeated int32 extension_number = 2; -} - -// A list of ServiceResponse sent by the server answering list_services request. -message ListServiceResponse { - // The information of each service may be expanded in the future, so we use - // ServiceResponse message to encapsulate it. - repeated ServiceResponse service = 1; -} - -// The information of a single service used by ListServiceResponse to answer -// list_services request. -message ServiceResponse { - // Full name of a registered service, including its package name. The format - // is . 
- string name = 1; -} - -// The error code and error message sent by the server when an error occurs. -message ErrorResponse { - // This field uses the error codes defined in grpc::StatusCode. - int32 error_code = 1; - string error_message = 2; -} diff --git a/proxy/grpc/reflection/v1alpha/reflection_pb2.py b/proxy/grpc/reflection/v1alpha/reflection_pb2.py deleted file mode 100644 index 93717cd..0000000 --- a/proxy/grpc/reflection/v1alpha/reflection_pb2.py +++ /dev/null @@ -1,498 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: proxy/grpc/reflection/v1alpha/reflection.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='proxy/grpc/reflection/v1alpha/reflection.proto', - package='grpc.reflection.v1alpha', - syntax='proto3', - serialized_options=None, - serialized_pb=_b('\n.proxy/grpc/reflection/v1alpha/reflection.proto\x12\x17grpc.reflection.v1alpha\"\x8a\x02\n\x17ServerReflectionRequest\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x1a\n\x10\x66ile_by_filename\x18\x03 \x01(\tH\x00\x12 \n\x16\x66ile_containing_symbol\x18\x04 \x01(\tH\x00\x12N\n\x19\x66ile_containing_extension\x18\x05 \x01(\x0b\x32).grpc.reflection.v1alpha.ExtensionRequestH\x00\x12\'\n\x1d\x61ll_extension_numbers_of_type\x18\x06 \x01(\tH\x00\x12\x17\n\rlist_services\x18\x07 \x01(\tH\x00\x42\x11\n\x0fmessage_request\"E\n\x10\x45xtensionRequest\x12\x17\n\x0f\x63ontaining_type\x18\x01 \x01(\t\x12\x18\n\x10\x65xtension_number\x18\x02 \x01(\x05\"\xd1\x03\n\x18ServerReflectionResponse\x12\x12\n\nvalid_host\x18\x01 \x01(\t\x12J\n\x10original_request\x18\x02 
\x01(\x0b\x32\x30.grpc.reflection.v1alpha.ServerReflectionRequest\x12S\n\x18\x66ile_descriptor_response\x18\x04 \x01(\x0b\x32/.grpc.reflection.v1alpha.FileDescriptorResponseH\x00\x12Z\n\x1e\x61ll_extension_numbers_response\x18\x05 \x01(\x0b\x32\x30.grpc.reflection.v1alpha.ExtensionNumberResponseH\x00\x12N\n\x16list_services_response\x18\x06 \x01(\x0b\x32,.grpc.reflection.v1alpha.ListServiceResponseH\x00\x12@\n\x0e\x65rror_response\x18\x07 \x01(\x0b\x32&.grpc.reflection.v1alpha.ErrorResponseH\x00\x42\x12\n\x10message_response\"7\n\x16\x46ileDescriptorResponse\x12\x1d\n\x15\x66ile_descriptor_proto\x18\x01 \x03(\x0c\"K\n\x17\x45xtensionNumberResponse\x12\x16\n\x0e\x62\x61se_type_name\x18\x01 \x01(\t\x12\x18\n\x10\x65xtension_number\x18\x02 \x03(\x05\"P\n\x13ListServiceResponse\x12\x39\n\x07service\x18\x01 \x03(\x0b\x32(.grpc.reflection.v1alpha.ServiceResponse\"\x1f\n\x0fServiceResponse\x12\x0c\n\x04name\x18\x01 \x01(\t\":\n\rErrorResponse\x12\x12\n\nerror_code\x18\x01 \x01(\x05\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x93\x01\n\x10ServerReflection\x12\x7f\n\x14ServerReflectionInfo\x12\x30.grpc.reflection.v1alpha.ServerReflectionRequest\x1a\x31.grpc.reflection.v1alpha.ServerReflectionResponse(\x01\x30\x01\x62\x06proto3') -) - - - - -_SERVERREFLECTIONREQUEST = _descriptor.Descriptor( - name='ServerReflectionRequest', - full_name='grpc.reflection.v1alpha.ServerReflectionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='host', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.host', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='file_by_filename', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.file_by_filename', index=1, - 
number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='file_containing_symbol', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.file_containing_symbol', index=2, - number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='file_containing_extension', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.file_containing_extension', index=3, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='all_extension_numbers_of_type', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.all_extension_numbers_of_type', index=4, - number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='list_services', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.list_services', index=5, - number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - 
syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='message_request', full_name='grpc.reflection.v1alpha.ServerReflectionRequest.message_request', - index=0, containing_type=None, fields=[]), - ], - serialized_start=76, - serialized_end=342, -) - - -_EXTENSIONREQUEST = _descriptor.Descriptor( - name='ExtensionRequest', - full_name='grpc.reflection.v1alpha.ExtensionRequest', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='containing_type', full_name='grpc.reflection.v1alpha.ExtensionRequest.containing_type', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extension_number', full_name='grpc.reflection.v1alpha.ExtensionRequest.extension_number', index=1, - number=2, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=344, - serialized_end=413, -) - - -_SERVERREFLECTIONRESPONSE = _descriptor.Descriptor( - name='ServerReflectionResponse', - full_name='grpc.reflection.v1alpha.ServerReflectionResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='valid_host', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.valid_host', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='original_request', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.original_request', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='file_descriptor_response', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.file_descriptor_response', index=2, - number=4, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='all_extension_numbers_response', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.all_extension_numbers_response', index=3, - number=5, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='list_services_response', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.list_services_response', index=4, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='error_response', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.error_response', index=5, - number=7, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name='message_response', full_name='grpc.reflection.v1alpha.ServerReflectionResponse.message_response', - index=0, containing_type=None, fields=[]), - ], - serialized_start=416, - serialized_end=881, -) - - -_FILEDESCRIPTORRESPONSE = _descriptor.Descriptor( - name='FileDescriptorResponse', - full_name='grpc.reflection.v1alpha.FileDescriptorResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='file_descriptor_proto', full_name='grpc.reflection.v1alpha.FileDescriptorResponse.file_descriptor_proto', index=0, - number=1, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=883, - serialized_end=938, -) - - -_EXTENSIONNUMBERRESPONSE = _descriptor.Descriptor( - name='ExtensionNumberResponse', - full_name='grpc.reflection.v1alpha.ExtensionNumberResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='base_type_name', full_name='grpc.reflection.v1alpha.ExtensionNumberResponse.base_type_name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='extension_number', 
full_name='grpc.reflection.v1alpha.ExtensionNumberResponse.extension_number', index=1, - number=2, type=5, cpp_type=1, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=940, - serialized_end=1015, -) - - -_LISTSERVICERESPONSE = _descriptor.Descriptor( - name='ListServiceResponse', - full_name='grpc.reflection.v1alpha.ListServiceResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='service', full_name='grpc.reflection.v1alpha.ListServiceResponse.service', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1017, - serialized_end=1097, -) - - -_SERVICERESPONSE = _descriptor.Descriptor( - name='ServiceResponse', - full_name='grpc.reflection.v1alpha.ServiceResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='grpc.reflection.v1alpha.ServiceResponse.name', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1099, - serialized_end=1130, -) - - -_ERRORRESPONSE = _descriptor.Descriptor( - name='ErrorResponse', - full_name='grpc.reflection.v1alpha.ErrorResponse', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='error_code', full_name='grpc.reflection.v1alpha.ErrorResponse.error_code', index=0, - number=1, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='error_message', full_name='grpc.reflection.v1alpha.ErrorResponse.error_message', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=1132, - serialized_end=1190, -) - -_SERVERREFLECTIONREQUEST.fields_by_name['file_containing_extension'].message_type = _EXTENSIONREQUEST -_SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'].fields.append( - _SERVERREFLECTIONREQUEST.fields_by_name['file_by_filename']) -_SERVERREFLECTIONREQUEST.fields_by_name['file_by_filename'].containing_oneof = _SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'] -_SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'].fields.append( - _SERVERREFLECTIONREQUEST.fields_by_name['file_containing_symbol']) -_SERVERREFLECTIONREQUEST.fields_by_name['file_containing_symbol'].containing_oneof = _SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'] 
-_SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'].fields.append( - _SERVERREFLECTIONREQUEST.fields_by_name['file_containing_extension']) -_SERVERREFLECTIONREQUEST.fields_by_name['file_containing_extension'].containing_oneof = _SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'] -_SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'].fields.append( - _SERVERREFLECTIONREQUEST.fields_by_name['all_extension_numbers_of_type']) -_SERVERREFLECTIONREQUEST.fields_by_name['all_extension_numbers_of_type'].containing_oneof = _SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'] -_SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'].fields.append( - _SERVERREFLECTIONREQUEST.fields_by_name['list_services']) -_SERVERREFLECTIONREQUEST.fields_by_name['list_services'].containing_oneof = _SERVERREFLECTIONREQUEST.oneofs_by_name['message_request'] -_SERVERREFLECTIONRESPONSE.fields_by_name['original_request'].message_type = _SERVERREFLECTIONREQUEST -_SERVERREFLECTIONRESPONSE.fields_by_name['file_descriptor_response'].message_type = _FILEDESCRIPTORRESPONSE -_SERVERREFLECTIONRESPONSE.fields_by_name['all_extension_numbers_response'].message_type = _EXTENSIONNUMBERRESPONSE -_SERVERREFLECTIONRESPONSE.fields_by_name['list_services_response'].message_type = _LISTSERVICERESPONSE -_SERVERREFLECTIONRESPONSE.fields_by_name['error_response'].message_type = _ERRORRESPONSE -_SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'].fields.append( - _SERVERREFLECTIONRESPONSE.fields_by_name['file_descriptor_response']) -_SERVERREFLECTIONRESPONSE.fields_by_name['file_descriptor_response'].containing_oneof = _SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'] -_SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'].fields.append( - _SERVERREFLECTIONRESPONSE.fields_by_name['all_extension_numbers_response']) -_SERVERREFLECTIONRESPONSE.fields_by_name['all_extension_numbers_response'].containing_oneof = _SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'] 
-_SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'].fields.append( - _SERVERREFLECTIONRESPONSE.fields_by_name['list_services_response']) -_SERVERREFLECTIONRESPONSE.fields_by_name['list_services_response'].containing_oneof = _SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'] -_SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'].fields.append( - _SERVERREFLECTIONRESPONSE.fields_by_name['error_response']) -_SERVERREFLECTIONRESPONSE.fields_by_name['error_response'].containing_oneof = _SERVERREFLECTIONRESPONSE.oneofs_by_name['message_response'] -_LISTSERVICERESPONSE.fields_by_name['service'].message_type = _SERVICERESPONSE -DESCRIPTOR.message_types_by_name['ServerReflectionRequest'] = _SERVERREFLECTIONREQUEST -DESCRIPTOR.message_types_by_name['ExtensionRequest'] = _EXTENSIONREQUEST -DESCRIPTOR.message_types_by_name['ServerReflectionResponse'] = _SERVERREFLECTIONRESPONSE -DESCRIPTOR.message_types_by_name['FileDescriptorResponse'] = _FILEDESCRIPTORRESPONSE -DESCRIPTOR.message_types_by_name['ExtensionNumberResponse'] = _EXTENSIONNUMBERRESPONSE -DESCRIPTOR.message_types_by_name['ListServiceResponse'] = _LISTSERVICERESPONSE -DESCRIPTOR.message_types_by_name['ServiceResponse'] = _SERVICERESPONSE -DESCRIPTOR.message_types_by_name['ErrorResponse'] = _ERRORRESPONSE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -ServerReflectionRequest = _reflection.GeneratedProtocolMessageType('ServerReflectionRequest', (_message.Message,), { - 'DESCRIPTOR' : _SERVERREFLECTIONREQUEST, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ServerReflectionRequest) - }) -_sym_db.RegisterMessage(ServerReflectionRequest) - -ExtensionRequest = _reflection.GeneratedProtocolMessageType('ExtensionRequest', (_message.Message,), { - 'DESCRIPTOR' : _EXTENSIONREQUEST, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # 
@@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ExtensionRequest) - }) -_sym_db.RegisterMessage(ExtensionRequest) - -ServerReflectionResponse = _reflection.GeneratedProtocolMessageType('ServerReflectionResponse', (_message.Message,), { - 'DESCRIPTOR' : _SERVERREFLECTIONRESPONSE, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ServerReflectionResponse) - }) -_sym_db.RegisterMessage(ServerReflectionResponse) - -FileDescriptorResponse = _reflection.GeneratedProtocolMessageType('FileDescriptorResponse', (_message.Message,), { - 'DESCRIPTOR' : _FILEDESCRIPTORRESPONSE, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.FileDescriptorResponse) - }) -_sym_db.RegisterMessage(FileDescriptorResponse) - -ExtensionNumberResponse = _reflection.GeneratedProtocolMessageType('ExtensionNumberResponse', (_message.Message,), { - 'DESCRIPTOR' : _EXTENSIONNUMBERRESPONSE, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ExtensionNumberResponse) - }) -_sym_db.RegisterMessage(ExtensionNumberResponse) - -ListServiceResponse = _reflection.GeneratedProtocolMessageType('ListServiceResponse', (_message.Message,), { - 'DESCRIPTOR' : _LISTSERVICERESPONSE, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ListServiceResponse) - }) -_sym_db.RegisterMessage(ListServiceResponse) - -ServiceResponse = _reflection.GeneratedProtocolMessageType('ServiceResponse', (_message.Message,), { - 'DESCRIPTOR' : _SERVICERESPONSE, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ServiceResponse) - }) -_sym_db.RegisterMessage(ServiceResponse) - -ErrorResponse = 
_reflection.GeneratedProtocolMessageType('ErrorResponse', (_message.Message,), { - 'DESCRIPTOR' : _ERRORRESPONSE, - '__module__' : 'proxy.grpc.reflection.v1alpha.reflection_pb2' - # @@protoc_insertion_point(class_scope:grpc.reflection.v1alpha.ErrorResponse) - }) -_sym_db.RegisterMessage(ErrorResponse) - - - -_SERVERREFLECTION = _descriptor.ServiceDescriptor( - name='ServerReflection', - full_name='grpc.reflection.v1alpha.ServerReflection', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=1193, - serialized_end=1340, - methods=[ - _descriptor.MethodDescriptor( - name='ServerReflectionInfo', - full_name='grpc.reflection.v1alpha.ServerReflection.ServerReflectionInfo', - index=0, - containing_service=None, - input_type=_SERVERREFLECTIONREQUEST, - output_type=_SERVERREFLECTIONRESPONSE, - serialized_options=None, - ), -]) -_sym_db.RegisterServiceDescriptor(_SERVERREFLECTION) - -DESCRIPTOR.services_by_name['ServerReflection'] = _SERVERREFLECTION - -# @@protoc_insertion_point(module_scope) diff --git a/proxy/grpc/reflection/v1alpha/reflection_pb2_grpc.py b/proxy/grpc/reflection/v1alpha/reflection_pb2_grpc.py deleted file mode 100644 index 3ee1004..0000000 --- a/proxy/grpc/reflection/v1alpha/reflection_pb2_grpc.py +++ /dev/null @@ -1,47 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from proxy.grpc.reflection.v1alpha import reflection_pb2 as proxy_dot_grpc_dot_reflection_dot_v1alpha_dot_reflection__pb2 - - -class ServerReflectionStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.ServerReflectionInfo = channel.stream_stream( - '/grpc.reflection.v1alpha.ServerReflection/ServerReflectionInfo', - request_serializer=proxy_dot_grpc_dot_reflection_dot_v1alpha_dot_reflection__pb2.ServerReflectionRequest.SerializeToString, - response_deserializer=proxy_dot_grpc_dot_reflection_dot_v1alpha_dot_reflection__pb2.ServerReflectionResponse.FromString, - ) - - -class ServerReflectionServicer(object): - # missing associated documentation comment in .proto file - pass - - def ServerReflectionInfo(self, request_iterator, context): - """The reflection service is structured as a bidirectional stream, ensuring - all related requests go to a single server. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_ServerReflectionServicer_to_server(servicer, server): - rpc_method_handlers = { - 'ServerReflectionInfo': grpc.stream_stream_rpc_method_handler( - servicer.ServerReflectionInfo, - request_deserializer=proxy_dot_grpc_dot_reflection_dot_v1alpha_dot_reflection__pb2.ServerReflectionRequest.FromString, - response_serializer=proxy_dot_grpc_dot_reflection_dot_v1alpha_dot_reflection__pb2.ServerReflectionResponse.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'grpc.reflection.v1alpha.ServerReflection', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index c2c5d63..0000000 --- a/requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -attrs==19.3.0 -google-api==0.1.12 -googleapis-common-protos >= 1.51.0 -grpcio==1.28.1 -grpcio-tools==1.28.1 -protobuf==3.11.3 -pytest==5.4.2 -six==1.14.0 diff --git a/scripts/compile-protbuf.sh b/scripts/compile-protbuf.sh deleted file mode 100644 index 4621b33..0000000 --- a/scripts/compile-protbuf.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o 
errexit -set -o pipefail - -# follow the basic steps here: https://grpc.io/docs/tutorials/basic/python/ - -python3 -m grpc_tools.protoc -Iprotobuf/protocol --python_out=. --grpc_python_out=. protobuf/protocol/cloudstate/entity.proto -python3 -m grpc_tools.protoc -Iprotobuf/protocol --python_out=. --grpc_python_out=. protobuf/protocol/cloudstate/event_sourced.proto -python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=. --grpc_python_out=. protobuf/frontend/cloudstate/entity_key.proto -python3 -m grpc_tools.protoc -Iprotobuf/example/ -Iprotobuf/frontend --python_out=. --grpc_python_out=. protobuf/example/shoppingcart/shoppingcart.proto -python3 -m grpc_tools.protoc -Iprotobuf/ --python_out=. --grpc_python_out=. protobuf/proxy/grpc/reflection/v1alpha/reflection.proto -python3 -m grpc_tools.protoc -Iprotobuf/ --python_out=. --grpc_python_out=. protobuf/frontend/google/api/annotations.proto -python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=. --grpc_python_out=. protobuf/frontend/google/api/annotations.proto -python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=. --grpc_python_out=. 
protobuf/frontend/google/api/http.proto \ No newline at end of file diff --git a/scripts/fetch-cloudstate-pb.sh b/scripts/fetch-cloudstate-pb.sh deleted file mode 100644 index bf5873f..0000000 --- a/scripts/fetch-cloudstate-pb.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o errexit -set -o pipefail - -function fetch() { - local path=$1 - local tag=$2 - mkdir -p protobuf/$(dirname $path) - curl -o protobuf/${path} https://raw.githubusercontent.com/cloudstateio/cloudstate/${tag}/protocols/${path} - #sed 's/^option java_package.*/option go_package = "${go_package}";/' protobuf/${path} -} - -tag=$1 - -# Cloudstate protocol -fetch "protocol/cloudstate/entity.proto" $tag -fetch "protocol/cloudstate/event_sourced.proto" $tag -fetch "protocol/cloudstate/function.proto" $tag -fetch "protocol/cloudstate/crdt.proto" $tag - -# TCK shopping cart example -fetch "example/shoppingcart/shoppingcart.proto" $tag -fetch "example/shoppingcart/persistence/domain.proto" $tag - -# Cloudstate frontend -fetch "frontend/cloudstate/entity_key.proto" $tag - -# dependencies -fetch "proxy/grpc/reflection/v1alpha/reflection.proto" $tag -fetch "frontend/google/api/annotations.proto" $tag -fetch "frontend/google/api/http.proto" $tag \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index cf2449d..0000000 --- a/setup.cfg +++ /dev/null @@ -1,33 +0,0 @@ -[metadata] -url = https://cloudstate.io/ -author = Cloudstate -license = Apache -license_file = LICENSE -classifiers = - Intended Audience :: Developers - License :: OSI Approved :: Apache Software License - Operating System :: OS Independent - Programming Language :: Python - Programming Language :: Python :: 3.6 -project_urls = - Documentation = https://cloudstate.io/docs/user/lang/index.html - Source = https://github.com/cloudstateio/python-support - -[options] -python_requires = >=3.6 -packages = find: -include_package_data = true -zip_safe = false -install_requires = - 
protobuf == 3.11.3 - google-api == 0.1.12 - grpcio == 1.28.1 - grpcio-tools == 1.28.1 - attrs == 19.3.0 - googleapis-common-protos >= 1.51.0 - -[aliases] -test=pytest - -[tool:pytest] -python_files = cloudstate/tests/test_*.py \ No newline at end of file diff --git a/setup.py b/setup.py deleted file mode 100644 index a6f595f..0000000 --- a/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -from setuptools import setup, find_packages - -# Load version in cloudstate package. -exec(open('cloudstate/version.py').read()) -version = __version__ -name = 'cloudstate' - -print(f'package name: {name}, version: {version}', flush=True) - -setup(name=name, - version=version, - url='https://github.com/cloudstateio/python-support', - license='Apache 2.0', - description='Cloudstate Python Support Library', - packages=find_packages(exclude=['tests', 'shoppingcart']), - long_description=open('Description.md', 'r').read(), - long_description_content_type='text/markdown', - zip_safe=False) diff --git a/shoppingcart/Dockerfile b/shoppingcart/Dockerfile deleted file mode 100644 index 96cea7a..0000000 --- a/shoppingcart/Dockerfile +++ /dev/null @@ -1,9 +0,0 @@ -FROM python:3.8.0-slim - -COPY ./dist /dist -RUN pip install /dist/cloudstate-0.1.0-py3-none-any.whl - -WORKDIR /app -COPY ./shoppingcart ./shoppingcart -ENV PYTHONPATH=/app -ENTRYPOINT python ./shoppingcart/shopping_cart.py diff --git a/shoppingcart/domain_pb2.py b/shoppingcart/domain_pb2.py deleted file mode 100644 index 0ca0ef9..0000000 --- a/shoppingcart/domain_pb2.py +++ /dev/null @@ -1,201 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: domain.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='domain.proto', - package='com.example.shoppingcart.persistence', - syntax='proto3', - serialized_options=None, - serialized_pb=b'\n\x0c\x64omain.proto\x12$com.example.shoppingcart.persistence\"=\n\x08LineItem\x12\x11\n\tproductId\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08quantity\x18\x03 \x01(\x05\"I\n\tItemAdded\x12<\n\x04item\x18\x01 \x01(\x0b\x32..com.example.shoppingcart.persistence.LineItem\" \n\x0bItemRemoved\x12\x11\n\tproductId\x18\x01 \x01(\t\"E\n\x04\x43\x61rt\x12=\n\x05items\x18\x01 \x03(\x0b\x32..com.example.shoppingcart.persistence.LineItemb\x06proto3' -) - - - - -_LINEITEM = _descriptor.Descriptor( - name='LineItem', - full_name='com.example.shoppingcart.persistence.LineItem', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='productId', full_name='com.example.shoppingcart.persistence.LineItem.productId', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='name', full_name='com.example.shoppingcart.persistence.LineItem.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='quantity', 
full_name='com.example.shoppingcart.persistence.LineItem.quantity', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=54, - serialized_end=115, -) - - -_ITEMADDED = _descriptor.Descriptor( - name='ItemAdded', - full_name='com.example.shoppingcart.persistence.ItemAdded', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='item', full_name='com.example.shoppingcart.persistence.ItemAdded.item', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=117, - serialized_end=190, -) - - -_ITEMREMOVED = _descriptor.Descriptor( - name='ItemRemoved', - full_name='com.example.shoppingcart.persistence.ItemRemoved', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='productId', full_name='com.example.shoppingcart.persistence.ItemRemoved.productId', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - 
is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=192, - serialized_end=224, -) - - -_CART = _descriptor.Descriptor( - name='Cart', - full_name='com.example.shoppingcart.persistence.Cart', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='items', full_name='com.example.shoppingcart.persistence.Cart.items', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=226, - serialized_end=295, -) - -_ITEMADDED.fields_by_name['item'].message_type = _LINEITEM -_CART.fields_by_name['items'].message_type = _LINEITEM -DESCRIPTOR.message_types_by_name['LineItem'] = _LINEITEM -DESCRIPTOR.message_types_by_name['ItemAdded'] = _ITEMADDED -DESCRIPTOR.message_types_by_name['ItemRemoved'] = _ITEMREMOVED -DESCRIPTOR.message_types_by_name['Cart'] = _CART -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -LineItem = _reflection.GeneratedProtocolMessageType('LineItem', (_message.Message,), { - 'DESCRIPTOR' : _LINEITEM, - '__module__' : 'domain_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.persistence.LineItem) - }) -_sym_db.RegisterMessage(LineItem) - -ItemAdded = _reflection.GeneratedProtocolMessageType('ItemAdded', (_message.Message,), { - 'DESCRIPTOR' : _ITEMADDED, - '__module__' : 'domain_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.persistence.ItemAdded) - }) -_sym_db.RegisterMessage(ItemAdded) - -ItemRemoved = _reflection.GeneratedProtocolMessageType('ItemRemoved', (_message.Message,), { - 'DESCRIPTOR' : _ITEMREMOVED, - '__module__' : 'domain_pb2' - # 
@@protoc_insertion_point(class_scope:com.example.shoppingcart.persistence.ItemRemoved) - }) -_sym_db.RegisterMessage(ItemRemoved) - -Cart = _reflection.GeneratedProtocolMessageType('Cart', (_message.Message,), { - 'DESCRIPTOR' : _CART, - '__module__' : 'domain_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.persistence.Cart) - }) -_sym_db.RegisterMessage(Cart) - - -# @@protoc_insertion_point(module_scope) diff --git a/shoppingcart/requirements.txt b/shoppingcart/requirements.txt deleted file mode 100644 index 6961028..0000000 --- a/shoppingcart/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -attrs==19.3.0 -grpcio==1.28.1 -grpcio-tools==1.28.1 -protobuf==3.11.3 -pytest==5.4.2 -six==1.14.0 diff --git a/shoppingcart/shopping_cart.py b/shoppingcart/shopping_cart.py deleted file mode 100644 index da0d4bd..0000000 --- a/shoppingcart/shopping_cart.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. -""" - -from cloudstate.cloudstate import CloudState -from shoppingcart.shopping_cart_entity import entity as shopping_cart_entity - -if __name__ == '__main__': - CloudState()\ - .port('8090')\ - .register_event_sourced_entity(shopping_cart_entity)\ - .start() diff --git a/shoppingcart/shopping_cart_entity.py b/shoppingcart/shopping_cart_entity.py deleted file mode 100644 index 2553cd7..0000000 --- a/shoppingcart/shopping_cart_entity.py +++ /dev/null @@ -1,102 +0,0 @@ -""" -Copyright 2020 Lightbend Inc. -Licensed under the Apache License, Version 2.0. 
-""" - -from dataclasses import dataclass, field -from typing import MutableMapping - -from google.protobuf.empty_pb2 import Empty - -from cloudstate.event_sourced_context import EventSourcedCommandContext -from cloudstate.event_sourced_entity import EventSourcedEntity -from shoppingcart.domain_pb2 import (Cart as DomainCart, LineItem as DomainLineItem, ItemAdded, ItemRemoved) -from shoppingcart.shoppingcart_pb2 import (Cart, LineItem, AddLineItem, RemoveLineItem) -from shoppingcart.shoppingcart_pb2 import (_SHOPPINGCART, DESCRIPTOR as FILE_DESCRIPTOR) - - -@dataclass -class ShoppingCartState: - entity_id: str - cart: MutableMapping[str, LineItem] = field(default_factory=dict) - - -def init(entity_id: str) -> ShoppingCartState: - return ShoppingCartState(entity_id) - - -entity = EventSourcedEntity(_SHOPPINGCART, [FILE_DESCRIPTOR], init) - - -def to_domain_line_item(item): - domain_item = DomainLineItem() - domain_item.productId = item.product_id - domain_item.name = item.name - domain_item.quantity = item.quantity - return domain_item - - -@entity.snapshot() -def snapshot(state: ShoppingCartState): - cart = DomainCart() - cart.items = [to_domain_line_item(item) for item in state.cart.values()] - return cart - - -def to_line_item(domain_item): - item = LineItem() - item.product_id = domain_item.productId - item.name = domain_item.name - item.quantity = domain_item.quantity - return item - - -@entity.snapshot_handler() -def handle_snapshot(state: ShoppingCartState, domain_cart: DomainCart): - state.cart = {domain_item.productId: to_line_item(domain_item) for domain_item in domain_cart.items} - - -@entity.event_handler(ItemAdded) -def item_added(state: ShoppingCartState, event: ItemAdded): - cart = state.cart - if event.item.productId in cart: - item = cart[event.item.productId] - item.quantity = item.quantity + event.item.quantity - else: - item = to_line_item(event.item) - cart[item.product_id] = item - - -@entity.event_handler(ItemRemoved) -def item_removed(state: 
ShoppingCartState, event: ItemRemoved): - del state.cart[event.productId] - - -@entity.command_handler("GetCart") -def get_cart(state: ShoppingCartState): - cart = Cart() - cart.items.extend(state.cart.values()) - return cart - - -@entity.command_handler("AddItem") -def add_item(item: AddLineItem, ctx: EventSourcedCommandContext): - if item.quantity <= 0: - ctx.fail("Cannot add negative quantity of to item {}".format(item.productId)) - else: - item_added_event = ItemAdded() - item_added_event.item.CopyFrom(to_domain_line_item(item)) - ctx.emit(item_added_event) - return Empty() - - -@entity.command_handler("RemoveItem") -def remove_item(state: ShoppingCartState, item: RemoveLineItem, ctx: EventSourcedCommandContext): - cart = state.cart - if item.product_id not in cart: - ctx.fail("Cannot remove item {} because it is not in the cart.".format(item.productId)) - else: - item_removed_event = ItemRemoved() - item_removed_event.productId = item.product_id - ctx.emit(item_removed_event) - return Empty() diff --git a/shoppingcart/shoppingcart_pb2.py b/shoppingcart/shoppingcart_pb2.py deleted file mode 100644 index 40c4e99..0000000 --- a/shoppingcart/shoppingcart_pb2.py +++ /dev/null @@ -1,319 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: shoppingcart/shoppingcart.proto - -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from cloudstate import entity_key_pb2 as cloudstate_dot_entity__key__pb2 -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import http_pb2 as google_dot_api_dot_http__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='shoppingcart/shoppingcart.proto', - package='com.example.shoppingcart', - syntax='proto3', - serialized_options=None, - serialized_pb=_b('\n\x1fshoppingcart/shoppingcart.proto\x12\x18\x63om.example.shoppingcart\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1b\x63loudstate/entity_key.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x15google/api/http.proto\"X\n\x0b\x41\x64\x64LineItem\x12\x15\n\x07user_id\x18\x01 \x01(\tB\x04\x90\xb5\x18\x01\x12\x12\n\nproduct_id\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x10\n\x08quantity\x18\x04 \x01(\x05\";\n\x0eRemoveLineItem\x12\x15\n\x07user_id\x18\x01 \x01(\tB\x04\x90\xb5\x18\x01\x12\x12\n\nproduct_id\x18\x02 \x01(\t\"(\n\x0fGetShoppingCart\x12\x15\n\x07user_id\x18\x01 \x01(\tB\x04\x90\xb5\x18\x01\">\n\x08LineItem\x12\x12\n\nproduct_id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08quantity\x18\x03 \x01(\x05\"9\n\x04\x43\x61rt\x12\x31\n\x05items\x18\x01 
\x03(\x0b\x32\".com.example.shoppingcart.LineItem2\x94\x03\n\x0cShoppingCart\x12n\n\x07\x41\x64\x64Item\x12%.com.example.shoppingcart.AddLineItem\x1a\x16.google.protobuf.Empty\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/cart/{user_id}/items/add:\x01*\x12\x81\x01\n\nRemoveItem\x12(.com.example.shoppingcart.RemoveLineItem\x1a\x16.google.protobuf.Empty\"1\x82\xd3\xe4\x93\x02+\")/cart/{user_id}/items/{product_id}/remove\x12\x8f\x01\n\x07GetCart\x12).com.example.shoppingcart.GetShoppingCart\x1a\x1e.com.example.shoppingcart.Cart\"9\x82\xd3\xe4\x93\x02\x33\x12\x10/carts/{user_id}Z\x1f\x12\x16/carts/{user_id}/itemsb\x05itemsb\x06proto3') - , - dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,cloudstate_dot_entity__key__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_http__pb2.DESCRIPTOR,]) - - - - -_ADDLINEITEM = _descriptor.Descriptor( - name='AddLineItem', - full_name='com.example.shoppingcart.AddLineItem', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='user_id', full_name='com.example.shoppingcart.AddLineItem.user_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=_b('\220\265\030\001'), file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='product_id', full_name='com.example.shoppingcart.AddLineItem.product_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='name', full_name='com.example.shoppingcart.AddLineItem.name', index=2, - number=3, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), 
- message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='quantity', full_name='com.example.shoppingcart.AddLineItem.quantity', index=3, - number=4, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=172, - serialized_end=260, -) - - -_REMOVELINEITEM = _descriptor.Descriptor( - name='RemoveLineItem', - full_name='com.example.shoppingcart.RemoveLineItem', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='user_id', full_name='com.example.shoppingcart.RemoveLineItem.user_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=_b('\220\265\030\001'), file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='product_id', full_name='com.example.shoppingcart.RemoveLineItem.product_id', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=262, - serialized_end=321, -) - - -_GETSHOPPINGCART = _descriptor.Descriptor( - name='GetShoppingCart', - 
full_name='com.example.shoppingcart.GetShoppingCart', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='user_id', full_name='com.example.shoppingcart.GetShoppingCart.user_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=_b('\220\265\030\001'), file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=323, - serialized_end=363, -) - - -_LINEITEM = _descriptor.Descriptor( - name='LineItem', - full_name='com.example.shoppingcart.LineItem', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='product_id', full_name='com.example.shoppingcart.LineItem.product_id', index=0, - number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='name', full_name='com.example.shoppingcart.LineItem.name', index=1, - number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - _descriptor.FieldDescriptor( - name='quantity', full_name='com.example.shoppingcart.LineItem.quantity', index=2, - number=3, type=5, cpp_type=1, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), 
- ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=365, - serialized_end=427, -) - - -_CART = _descriptor.Descriptor( - name='Cart', - full_name='com.example.shoppingcart.Cart', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='items', full_name='com.example.shoppingcart.Cart.items', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - serialized_options=None, file=DESCRIPTOR), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - serialized_options=None, - is_extendable=False, - syntax='proto3', - extension_ranges=[], - oneofs=[ - ], - serialized_start=429, - serialized_end=486, -) - -_CART.fields_by_name['items'].message_type = _LINEITEM -DESCRIPTOR.message_types_by_name['AddLineItem'] = _ADDLINEITEM -DESCRIPTOR.message_types_by_name['RemoveLineItem'] = _REMOVELINEITEM -DESCRIPTOR.message_types_by_name['GetShoppingCart'] = _GETSHOPPINGCART -DESCRIPTOR.message_types_by_name['LineItem'] = _LINEITEM -DESCRIPTOR.message_types_by_name['Cart'] = _CART -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -AddLineItem = _reflection.GeneratedProtocolMessageType('AddLineItem', (_message.Message,), { - 'DESCRIPTOR' : _ADDLINEITEM, - '__module__' : 'shoppingcart.shoppingcart_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.AddLineItem) - }) -_sym_db.RegisterMessage(AddLineItem) - -RemoveLineItem = _reflection.GeneratedProtocolMessageType('RemoveLineItem', (_message.Message,), { - 'DESCRIPTOR' : _REMOVELINEITEM, - '__module__' : 'shoppingcart.shoppingcart_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.RemoveLineItem) - }) -_sym_db.RegisterMessage(RemoveLineItem) - -GetShoppingCart = 
_reflection.GeneratedProtocolMessageType('GetShoppingCart', (_message.Message,), { - 'DESCRIPTOR' : _GETSHOPPINGCART, - '__module__' : 'shoppingcart.shoppingcart_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.GetShoppingCart) - }) -_sym_db.RegisterMessage(GetShoppingCart) - -LineItem = _reflection.GeneratedProtocolMessageType('LineItem', (_message.Message,), { - 'DESCRIPTOR' : _LINEITEM, - '__module__' : 'shoppingcart.shoppingcart_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.LineItem) - }) -_sym_db.RegisterMessage(LineItem) - -Cart = _reflection.GeneratedProtocolMessageType('Cart', (_message.Message,), { - 'DESCRIPTOR' : _CART, - '__module__' : 'shoppingcart.shoppingcart_pb2' - # @@protoc_insertion_point(class_scope:com.example.shoppingcart.Cart) - }) -_sym_db.RegisterMessage(Cart) - - -_ADDLINEITEM.fields_by_name['user_id']._options = None -_REMOVELINEITEM.fields_by_name['user_id']._options = None -_GETSHOPPINGCART.fields_by_name['user_id']._options = None - -_SHOPPINGCART = _descriptor.ServiceDescriptor( - name='ShoppingCart', - full_name='com.example.shoppingcart.ShoppingCart', - file=DESCRIPTOR, - index=0, - serialized_options=None, - serialized_start=489, - serialized_end=893, - methods=[ - _descriptor.MethodDescriptor( - name='AddItem', - full_name='com.example.shoppingcart.ShoppingCart.AddItem', - index=0, - containing_service=None, - input_type=_ADDLINEITEM, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b('\202\323\344\223\002\036\"\031/cart/{user_id}/items/add:\001*'), - ), - _descriptor.MethodDescriptor( - name='RemoveItem', - full_name='com.example.shoppingcart.ShoppingCart.RemoveItem', - index=1, - containing_service=None, - input_type=_REMOVELINEITEM, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b('\202\323\344\223\002+\")/cart/{user_id}/items/{product_id}/remove'), - ), - _descriptor.MethodDescriptor( - name='GetCart', - 
full_name='com.example.shoppingcart.ShoppingCart.GetCart', - index=2, - containing_service=None, - input_type=_GETSHOPPINGCART, - output_type=_CART, - serialized_options=_b('\202\323\344\223\0023\022\020/carts/{user_id}Z\037\022\026/carts/{user_id}/itemsb\005items'), - ), -]) -_sym_db.RegisterServiceDescriptor(_SHOPPINGCART) - -DESCRIPTOR.services_by_name['ShoppingCart'] = _SHOPPINGCART - -# @@protoc_insertion_point(module_scope) diff --git a/shoppingcart/shoppingcart_pb2_grpc.py b/shoppingcart/shoppingcart_pb2_grpc.py deleted file mode 100644 index 07e1f6d..0000000 --- a/shoppingcart/shoppingcart_pb2_grpc.py +++ /dev/null @@ -1,81 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from shoppingcart import shoppingcart_pb2 as shoppingcart_dot_shoppingcart__pb2 - - -class ShoppingCartStub(object): - # missing associated documentation comment in .proto file - pass - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.AddItem = channel.unary_unary( - '/com.example.shoppingcart.ShoppingCart/AddItem', - request_serializer=shoppingcart_dot_shoppingcart__pb2.AddLineItem.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.RemoveItem = channel.unary_unary( - '/com.example.shoppingcart.ShoppingCart/RemoveItem', - request_serializer=shoppingcart_dot_shoppingcart__pb2.RemoveLineItem.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.GetCart = channel.unary_unary( - '/com.example.shoppingcart.ShoppingCart/GetCart', - request_serializer=shoppingcart_dot_shoppingcart__pb2.GetShoppingCart.SerializeToString, - response_deserializer=shoppingcart_dot_shoppingcart__pb2.Cart.FromString, - ) - - -class ShoppingCartServicer(object): - # missing associated documentation comment in .proto file - pass - - def AddItem(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def RemoveItem(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - def GetCart(self, request, context): - # missing associated documentation comment in .proto file - pass - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') - - -def add_ShoppingCartServicer_to_server(servicer, server): - rpc_method_handlers = { - 'AddItem': grpc.unary_unary_rpc_method_handler( - servicer.AddItem, - request_deserializer=shoppingcart_dot_shoppingcart__pb2.AddLineItem.FromString, - 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'RemoveItem': grpc.unary_unary_rpc_method_handler( - servicer.RemoveItem, - request_deserializer=shoppingcart_dot_shoppingcart__pb2.RemoveLineItem.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - 'GetCart': grpc.unary_unary_rpc_method_handler( - servicer.GetCart, - request_deserializer=shoppingcart_dot_shoppingcart__pb2.GetShoppingCart.FromString, - response_serializer=shoppingcart_dot_shoppingcart__pb2.Cart.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - 'com.example.shoppingcart.ShoppingCart', rpc_method_handlers) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/tck.sh b/tck.sh deleted file mode 100755 index a7945e6..0000000 --- a/tck.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env bash - -# Run Python shoppingcart here -python shoppingcart/shopping_cart.py & -pid=$! -echo "Start python shoppingcart user-function with pid $pid" -sleep 3 - -# The host network usually doesn't work on MacOS systems -# so we should address this issue and use the docker's internal network when the system is Mac -echo "Starting Cloudstate proxy in development mode via docker" -docker run -d --name cloudstate-proxy --net=host -e USER_FUNCTION_PORT=8090 cloudstateio/cloudstate-proxy-dev-mode - -echo "Starting TCK via docker" -docker run --rm --name cloudstate-tck --net=host cloudstateio/cloudstate-tck -status=$? - -echo "Removing cloudstate-proxy docker image" -docker rm -f cloudstate-proxy - -echo "Stopping Shoppingcart user-function" -kill -9 $pid - -exit $status diff --git a/tck/build_tck_docker_image.sh b/tck/build_tck_docker_image.sh deleted file mode 100755 index 229e313..0000000 --- a/tck/build_tck_docker_image.sh +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env sh - -docker build -t gcr.io/mrcllnz/cloudstate-python-tck -f shoppingcart/Dockerfile . 
\ No newline at end of file diff --git a/tck/run_tck.sh b/tck/run_tck.sh deleted file mode 100755 index 56db5eb..0000000 --- a/tck/run_tck.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env bash -set -o nounset - -function rnd() { - cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w ${1:-32} | head -n 1 -} - -FUNC_IMAGE=${1:-gcr.io/mrcllnz/cloudstate-python-tck:latest} -FUNC="cloudstate-function-$(rnd)" -PROXY_IMAGE=${2:-cloudstateio/cloudstate-proxy-dev-mode:latest} -PROXY="cloudstate-proxy-$(rnd)" -TCK_IMAGE=${3:-cloudstateio/cloudstate-tck:latest} -TCK="cloudstate-tck-$(rnd)" - -finally() { - docker rm -f "$PROXY" - docker rm -f "$FUNC" -} -trap finally EXIT -set -x - -# run the function and the proxy -docker run -d --name "$FUNC" --net=host "${FUNC_IMAGE}" || exit $? -docker run -d --name "$PROXY" --net=host -e USER_FUNCTION_PORT=8090 "${PROXY_IMAGE}" || exit $? - -# run the tck -docker run --rm --name "$TCK" --net=host "${TCK_IMAGE}" -tck_status=$? -if [ "$tck_status" -ne "0" ]; then - docker logs "$FUNC" -fi -exit $tck_status diff --git a/venv/lib/python3.7/site-packages/google/protobuf/descriptor.py b/venv/lib/python3.7/site-packages/google/protobuf/descriptor.py deleted file mode 100644 index 2c2a079..0000000 --- a/venv/lib/python3.7/site-packages/google/protobuf/descriptor.py +++ /dev/null @@ -1,1077 +0,0 @@ -# Protocol Buffers - Google's data interchange format -# Copyright 2008 Google Inc. All rights reserved. -# https://developers.google.com/protocol-buffers/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. 
-# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Descriptors essentially contain exactly the information found in a .proto -file, in types that make this information accessible in Python. -""" - -__author__ = 'robinson@google.com (Will Robinson)' - -import threading -import six - -from google.protobuf.internal import api_implementation - -_USE_C_DESCRIPTORS = False -if api_implementation.Type() == 'cpp': - # Used by MakeDescriptor in cpp mode - import binascii - import os - from google.protobuf.pyext import _message - _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False) - - -class Error(Exception): - """Base error for this module.""" - - -class TypeTransformationError(Error): - """Error transforming between python proto type and corresponding C++ type.""" - - -if _USE_C_DESCRIPTORS: - # This metaclass allows to override the behavior of code like - # isinstance(my_descriptor, FieldDescriptor) - # and make it return True when the descriptor is an instance of the extension - # type written in C++. 
- class DescriptorMetaclass(type): - def __instancecheck__(cls, obj): - if super(DescriptorMetaclass, cls).__instancecheck__(obj): - return True - if isinstance(obj, cls._C_DESCRIPTOR_CLASS): - return True - return False -else: - # The standard metaclass; nothing changes. - DescriptorMetaclass = type - - -class _Lock(object): - """Wrapper class of threading.Lock(), which is allowed by 'with'.""" - - def __new__(cls): - self = object.__new__(cls) - self._lock = threading.Lock() # pylint: disable=protected-access - return self - - def __enter__(self): - self._lock.acquire() - - def __exit__(self, exc_type, exc_value, exc_tb): - self._lock.release() - - -_lock = threading.Lock() - - -class DescriptorBase(six.with_metaclass(DescriptorMetaclass)): - - """Descriptors base class. - - This class is the base of all descriptor classes. It provides common options - related functionality. - - Attributes: - has_options: True if the descriptor has non-default options. Usually it - is not necessary to read this -- just call GetOptions() which will - happily return the default instance. However, it's sometimes useful - for efficiency, and also useful inside the protobuf implementation to - avoid some bootstrapping issues. - """ - - if _USE_C_DESCRIPTORS: - # The class, or tuple of classes, that are considered as "virtual - # subclasses" of this descriptor class. - _C_DESCRIPTOR_CLASS = () - - def __init__(self, options, serialized_options, options_class_name): - """Initialize the descriptor given its options message and the name of the - class of the options message. The name of the class is required in case - the options message is None and has to be created. - """ - self._options = options - self._options_class_name = options_class_name - self._serialized_options = serialized_options - - # Does this descriptor have non-default options? 
- self.has_options = (options is not None) or (serialized_options is not None) - - def _SetOptions(self, options, options_class_name): - """Sets the descriptor's options - - This function is used in generated proto2 files to update descriptor - options. It must not be used outside proto2. - """ - self._options = options - self._options_class_name = options_class_name - - # Does this descriptor have non-default options? - self.has_options = options is not None - - def GetOptions(self): - """Retrieves descriptor options. - - This method returns the options set or creates the default options for the - descriptor. - """ - if self._options: - return self._options - - from google.protobuf import descriptor_pb2 - try: - options_class = getattr(descriptor_pb2, - self._options_class_name) - except AttributeError: - raise RuntimeError('Unknown options class name %s!' % - (self._options_class_name)) - - with _lock: - if self._serialized_options is None: - self._options = options_class() - else: - self._options = _ParseOptions(options_class(), - self._serialized_options) - - return self._options - - -class _NestedDescriptorBase(DescriptorBase): - """Common class for descriptors that can be nested.""" - - def __init__(self, options, options_class_name, name, full_name, - file, containing_type, serialized_start=None, - serialized_end=None, serialized_options=None): - """Constructor. - - Args: - options: Protocol message options or None - to use default message options. - options_class_name: (str) The class name of the above options. - - name: (str) Name of this protocol message type. - full_name: (str) Fully-qualified name of this protocol message type, - which will include protocol "package" name and the name of any - enclosing types. - file: (FileDescriptor) Reference to file info. - containing_type: if provided, this is a nested descriptor, with this - descriptor as parent, otherwise None. 
- serialized_start: The start index (inclusive) in block in the - file.serialized_pb that describes this descriptor. - serialized_end: The end index (exclusive) in block in the - file.serialized_pb that describes this descriptor. - serialized_options: Protocol message serilized options or None. - """ - super(_NestedDescriptorBase, self).__init__( - options, serialized_options, options_class_name) - - self.name = name - # TODO(falk): Add function to calculate full_name instead of having it in - # memory? - self.full_name = full_name - self.file = file - self.containing_type = containing_type - - self._serialized_start = serialized_start - self._serialized_end = serialized_end - - def CopyToProto(self, proto): - """Copies this to the matching proto in descriptor_pb2. - - Args: - proto: An empty proto instance from descriptor_pb2. - - Raises: - Error: If self couldnt be serialized, due to to few constructor arguments. - """ - if (self.file is not None and - self._serialized_start is not None and - self._serialized_end is not None): - proto.ParseFromString(self.file.serialized_pb[ - self._serialized_start:self._serialized_end]) - else: - raise Error('Descriptor does not contain serialization.') - - -class Descriptor(_NestedDescriptorBase): - - """Descriptor for a protocol message type. - - A Descriptor instance has the following attributes: - - name: (str) Name of this protocol message type. - full_name: (str) Fully-qualified name of this protocol message type, - which will include protocol "package" name and the name of any - enclosing types. - - containing_type: (Descriptor) Reference to the descriptor of the - type containing us, or None if this is top-level. - - fields: (list of FieldDescriptors) Field descriptors for all - fields in this type. - fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor - objects as in |fields|, but indexed by "number" attribute in each - FieldDescriptor. 
- fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor - objects as in |fields|, but indexed by "name" attribute in each - FieldDescriptor. - fields_by_camelcase_name: (dict str -> FieldDescriptor) Same - FieldDescriptor objects as in |fields|, but indexed by - "camelcase_name" attribute in each FieldDescriptor. - - nested_types: (list of Descriptors) Descriptor references - for all protocol message types nested within this one. - nested_types_by_name: (dict str -> Descriptor) Same Descriptor - objects as in |nested_types|, but indexed by "name" attribute - in each Descriptor. - - enum_types: (list of EnumDescriptors) EnumDescriptor references - for all enums contained within this type. - enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor - objects as in |enum_types|, but indexed by "name" attribute - in each EnumDescriptor. - enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping - from enum value name to EnumValueDescriptor for that value. - - extensions: (list of FieldDescriptor) All extensions defined directly - within this message type (NOT within a nested type). - extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor - objects as |extensions|, but indexed by "name" attribute of each - FieldDescriptor. - - is_extendable: Does this type define any extension ranges? - - oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields - in this message. - oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|, - but indexed by "name" attribute. - - file: (FileDescriptor) Reference to file descriptor. 
- """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.Descriptor - - def __new__(cls, name, full_name, filename, containing_type, fields, - nested_types, enum_types, extensions, options=None, - serialized_options=None, - is_extendable=True, extension_ranges=None, oneofs=None, - file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin - syntax=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindMessageTypeByName(full_name) - - # NOTE(tmarek): The file argument redefining a builtin is nothing we can - # fix right now since we don't know how many clients already rely on the - # name of the argument. - def __init__(self, name, full_name, filename, containing_type, fields, - nested_types, enum_types, extensions, options=None, - serialized_options=None, - is_extendable=True, extension_ranges=None, oneofs=None, - file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin - syntax=None): - """Arguments to __init__() are as described in the description - of Descriptor fields above. - - Note that filename is an obsolete argument, that is not used anymore. - Please use file.name to access this as an attribute. - """ - super(Descriptor, self).__init__( - options, 'MessageOptions', name, full_name, file, - containing_type, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - - # We have fields in addition to fields_by_name and fields_by_number, - # so that: - # 1. Clients can index fields by "order in which they're listed." - # 2. Clients can easily iterate over all fields with the terse - # syntax: for f in descriptor.fields: ... 
- self.fields = fields - for field in self.fields: - field.containing_type = self - self.fields_by_number = dict((f.number, f) for f in fields) - self.fields_by_name = dict((f.name, f) for f in fields) - self._fields_by_camelcase_name = None - - self.nested_types = nested_types - for nested_type in nested_types: - nested_type.containing_type = self - self.nested_types_by_name = dict((t.name, t) for t in nested_types) - - self.enum_types = enum_types - for enum_type in self.enum_types: - enum_type.containing_type = self - self.enum_types_by_name = dict((t.name, t) for t in enum_types) - self.enum_values_by_name = dict( - (v.name, v) for t in enum_types for v in t.values) - - self.extensions = extensions - for extension in self.extensions: - extension.extension_scope = self - self.extensions_by_name = dict((f.name, f) for f in extensions) - self.is_extendable = is_extendable - self.extension_ranges = extension_ranges - self.oneofs = oneofs if oneofs is not None else [] - self.oneofs_by_name = dict((o.name, o) for o in self.oneofs) - for oneof in self.oneofs: - oneof.containing_type = self - self.syntax = syntax or "proto2" - - @property - def fields_by_camelcase_name(self): - if self._fields_by_camelcase_name is None: - self._fields_by_camelcase_name = dict( - (f.camelcase_name, f) for f in self.fields) - return self._fields_by_camelcase_name - - def EnumValueName(self, enum, value): - """Returns the string name of an enum value. - - This is just a small helper method to simplify a common operation. - - Args: - enum: string name of the Enum. - value: int, value of the enum. - - Returns: - string name of the enum value. - - Raises: - KeyError if either the Enum doesn't exist or the value is not a valid - value for the enum. - """ - return self.enum_types_by_name[enum].values_by_number[value].name - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.DescriptorProto. - - Args: - proto: An empty descriptor_pb2.DescriptorProto. 
- """ - # This function is overridden to give a better doc comment. - super(Descriptor, self).CopyToProto(proto) - - -# TODO(robinson): We should have aggressive checking here, -# for example: -# * If you specify a repeated field, you should not be allowed -# to specify a default value. -# * [Other examples here as needed]. -# -# TODO(robinson): for this and other *Descriptor classes, we -# might also want to lock things down aggressively (e.g., -# prevent clients from setting the attributes). Having -# stronger invariants here in general will reduce the number -# of runtime checks we must do in reflection.py... -class FieldDescriptor(DescriptorBase): - - """Descriptor for a single field in a .proto file. - - A FieldDescriptor instance has the following attributes: - - name: (str) Name of this field, exactly as it appears in .proto. - full_name: (str) Name of this field, including containing scope. This is - particularly relevant for extensions. - camelcase_name: (str) Camelcase name of this field. - index: (int) Dense, 0-indexed index giving the order that this - field textually appears within its message in the .proto file. - number: (int) Tag number declared for this field in the .proto file. - - type: (One of the TYPE_* constants below) Declared type. - cpp_type: (One of the CPPTYPE_* constants below) C++ type used to - represent this field. - - label: (One of the LABEL_* constants below) Tells whether this - field is optional, required, or repeated. - has_default_value: (bool) True if this field has a default value defined, - otherwise false. - default_value: (Varies) Default value of this field. Only - meaningful for non-repeated scalar fields. Repeated fields - should always set this to [], and non-repeated composite - fields should always set this to None. - - containing_type: (Descriptor) Descriptor of the protocol message - type that contains this field. Set by the Descriptor constructor - if we're passed into one. 
- Somewhat confusingly, for extension fields, this is the - descriptor of the EXTENDED message, not the descriptor - of the message containing this field. (See is_extension and - extension_scope below). - message_type: (Descriptor) If a composite field, a descriptor - of the message type contained in this field. Otherwise, this is None. - enum_type: (EnumDescriptor) If this field contains an enum, a - descriptor of that enum. Otherwise, this is None. - - is_extension: True iff this describes an extension field. - extension_scope: (Descriptor) Only meaningful if is_extension is True. - Gives the message that immediately contains this extension field. - Will be None iff we're a top-level (file-level) extension field. - - options: (descriptor_pb2.FieldOptions) Protocol message field options or - None to use default field options. - - containing_oneof: (OneofDescriptor) If the field is a member of a oneof - union, contains its descriptor. Otherwise, None. - - file: (FileDescriptor) Reference to file descriptor. - """ - - # Must be consistent with C++ FieldDescriptor::Type enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. - TYPE_DOUBLE = 1 - TYPE_FLOAT = 2 - TYPE_INT64 = 3 - TYPE_UINT64 = 4 - TYPE_INT32 = 5 - TYPE_FIXED64 = 6 - TYPE_FIXED32 = 7 - TYPE_BOOL = 8 - TYPE_STRING = 9 - TYPE_GROUP = 10 - TYPE_MESSAGE = 11 - TYPE_BYTES = 12 - TYPE_UINT32 = 13 - TYPE_ENUM = 14 - TYPE_SFIXED32 = 15 - TYPE_SFIXED64 = 16 - TYPE_SINT32 = 17 - TYPE_SINT64 = 18 - MAX_TYPE = 18 - - # Must be consistent with C++ FieldDescriptor::CppType enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. 
- CPPTYPE_INT32 = 1 - CPPTYPE_INT64 = 2 - CPPTYPE_UINT32 = 3 - CPPTYPE_UINT64 = 4 - CPPTYPE_DOUBLE = 5 - CPPTYPE_FLOAT = 6 - CPPTYPE_BOOL = 7 - CPPTYPE_ENUM = 8 - CPPTYPE_STRING = 9 - CPPTYPE_MESSAGE = 10 - MAX_CPPTYPE = 10 - - _PYTHON_TO_CPP_PROTO_TYPE_MAP = { - TYPE_DOUBLE: CPPTYPE_DOUBLE, - TYPE_FLOAT: CPPTYPE_FLOAT, - TYPE_ENUM: CPPTYPE_ENUM, - TYPE_INT64: CPPTYPE_INT64, - TYPE_SINT64: CPPTYPE_INT64, - TYPE_SFIXED64: CPPTYPE_INT64, - TYPE_UINT64: CPPTYPE_UINT64, - TYPE_FIXED64: CPPTYPE_UINT64, - TYPE_INT32: CPPTYPE_INT32, - TYPE_SFIXED32: CPPTYPE_INT32, - TYPE_SINT32: CPPTYPE_INT32, - TYPE_UINT32: CPPTYPE_UINT32, - TYPE_FIXED32: CPPTYPE_UINT32, - TYPE_BYTES: CPPTYPE_STRING, - TYPE_STRING: CPPTYPE_STRING, - TYPE_BOOL: CPPTYPE_BOOL, - TYPE_MESSAGE: CPPTYPE_MESSAGE, - TYPE_GROUP: CPPTYPE_MESSAGE - } - - # Must be consistent with C++ FieldDescriptor::Label enum in - # descriptor.h. - # - # TODO(robinson): Find a way to eliminate this repetition. - LABEL_OPTIONAL = 1 - LABEL_REQUIRED = 2 - LABEL_REPEATED = 3 - MAX_LABEL = 3 - - # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber, - # and kLastReservedNumber in descriptor.h - MAX_FIELD_NUMBER = (1 << 29) - 1 - FIRST_RESERVED_FIELD_NUMBER = 19000 - LAST_RESERVED_FIELD_NUMBER = 19999 - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.FieldDescriptor - - def __new__(cls, name, full_name, index, number, type, cpp_type, label, - default_value, message_type, enum_type, containing_type, - is_extension, extension_scope, options=None, - serialized_options=None, - has_default_value=True, containing_oneof=None, json_name=None, - file=None): # pylint: disable=redefined-builtin - _message.Message._CheckCalledFromGeneratedFile() - if is_extension: - return _message.default_pool.FindExtensionByName(full_name) - else: - return _message.default_pool.FindFieldByName(full_name) - - def __init__(self, name, full_name, index, number, type, cpp_type, label, - default_value, message_type, enum_type, 
containing_type, - is_extension, extension_scope, options=None, - serialized_options=None, - has_default_value=True, containing_oneof=None, json_name=None, - file=None): # pylint: disable=redefined-builtin - """The arguments are as described in the description of FieldDescriptor - attributes above. - - Note that containing_type may be None, and may be set later if necessary - (to deal with circular references between message types, for example). - Likewise for extension_scope. - """ - super(FieldDescriptor, self).__init__( - options, serialized_options, 'FieldOptions') - self.name = name - self.full_name = full_name - self.file = file - self._camelcase_name = None - if json_name is None: - self.json_name = _ToJsonName(name) - else: - self.json_name = json_name - self.index = index - self.number = number - self.type = type - self.cpp_type = cpp_type - self.label = label - self.has_default_value = has_default_value - self.default_value = default_value - self.containing_type = containing_type - self.message_type = message_type - self.enum_type = enum_type - self.is_extension = is_extension - self.extension_scope = extension_scope - self.containing_oneof = containing_oneof - if api_implementation.Type() == 'cpp': - if is_extension: - self._cdescriptor = _message.default_pool.FindExtensionByName(full_name) - else: - self._cdescriptor = _message.default_pool.FindFieldByName(full_name) - else: - self._cdescriptor = None - - @property - def camelcase_name(self): - if self._camelcase_name is None: - self._camelcase_name = _ToCamelCase(self.name) - return self._camelcase_name - - @staticmethod - def ProtoTypeToCppProtoType(proto_type): - """Converts from a Python proto type to a C++ Proto Type. - - The Python ProtocolBuffer classes specify both the 'Python' datatype and the - 'C++' datatype - and they're not the same. This helper method should - translate from one to another. 
- - Args: - proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*) - Returns: - descriptor.FieldDescriptor.CPPTYPE_*, the C++ type. - Raises: - TypeTransformationError: when the Python proto type isn't known. - """ - try: - return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type] - except KeyError: - raise TypeTransformationError('Unknown proto_type: %s' % proto_type) - - -class EnumDescriptor(_NestedDescriptorBase): - - """Descriptor for an enum defined in a .proto file. - - An EnumDescriptor instance has the following attributes: - - name: (str) Name of the enum type. - full_name: (str) Full name of the type, including package name - and any enclosing type(s). - - values: (list of EnumValueDescriptors) List of the values - in this enum. - values_by_name: (dict str -> EnumValueDescriptor) Same as |values|, - but indexed by the "name" field of each EnumValueDescriptor. - values_by_number: (dict int -> EnumValueDescriptor) Same as |values|, - but indexed by the "number" field of each EnumValueDescriptor. - containing_type: (Descriptor) Descriptor of the immediate containing - type of this enum, or None if this is an enum defined at the - top level in a .proto file. Set by Descriptor's constructor - if we're passed into one. - file: (FileDescriptor) Reference to file descriptor. - options: (descriptor_pb2.EnumOptions) Enum options message or - None to use default enum options. 
- """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.EnumDescriptor - - def __new__(cls, name, full_name, filename, values, - containing_type=None, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindEnumTypeByName(full_name) - - def __init__(self, name, full_name, filename, values, - containing_type=None, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None): - """Arguments are as described in the attribute description above. - - Note that filename is an obsolete argument, that is not used anymore. - Please use file.name to access this as an attribute. - """ - super(EnumDescriptor, self).__init__( - options, 'EnumOptions', name, full_name, file, - containing_type, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - - self.values = values - for value in self.values: - value.type = self - self.values_by_name = dict((v.name, v) for v in values) - # Values are reversed to ensure that the first alias is retained. - self.values_by_number = dict((v.number, v) for v in reversed(values)) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.EnumDescriptorProto. - - Args: - proto: An empty descriptor_pb2.EnumDescriptorProto. - """ - # This function is overridden to give a better doc comment. - super(EnumDescriptor, self).CopyToProto(proto) - - -class EnumValueDescriptor(DescriptorBase): - - """Descriptor for a single value within an enum. - - name: (str) Name of this value. - index: (int) Dense, 0-indexed index giving the order that this - value appears textually within its enum in the .proto file. - number: (int) Actual number assigned to this enum value. - type: (EnumDescriptor) EnumDescriptor to which this value - belongs. 
Set by EnumDescriptor's constructor if we're - passed into one. - options: (descriptor_pb2.EnumValueOptions) Enum value options message or - None to use default enum value options options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor - - def __new__(cls, name, index, number, - type=None, # pylint: disable=redefined-builtin - options=None, serialized_options=None): - _message.Message._CheckCalledFromGeneratedFile() - # There is no way we can build a complete EnumValueDescriptor with the - # given parameters (the name of the Enum is not known, for example). - # Fortunately generated files just pass it to the EnumDescriptor() - # constructor, which will ignore it, so returning None is good enough. - return None - - def __init__(self, name, index, number, - type=None, # pylint: disable=redefined-builtin - options=None, serialized_options=None): - """Arguments are as described in the attribute description above.""" - super(EnumValueDescriptor, self).__init__( - options, serialized_options, 'EnumValueOptions') - self.name = name - self.index = index - self.number = number - self.type = type - - -class OneofDescriptor(DescriptorBase): - """Descriptor for a oneof field. - - name: (str) Name of the oneof field. - full_name: (str) Full name of the oneof field, including package name. - index: (int) 0-based index giving the order of the oneof field inside - its containing type. - containing_type: (Descriptor) Descriptor of the protocol message - type that contains this field. Set by the Descriptor constructor - if we're passed into one. - fields: (list of FieldDescriptor) The list of field descriptors this - oneof can contain. 
- """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.OneofDescriptor - - def __new__( - cls, name, full_name, index, containing_type, fields, options=None, - serialized_options=None): - _message.Message._CheckCalledFromGeneratedFile() - return _message.default_pool.FindOneofByName(full_name) - - def __init__( - self, name, full_name, index, containing_type, fields, options=None, - serialized_options=None): - """Arguments are as described in the attribute description above.""" - super(OneofDescriptor, self).__init__( - options, serialized_options, 'OneofOptions') - self.name = name - self.full_name = full_name - self.index = index - self.containing_type = containing_type - self.fields = fields - - -class ServiceDescriptor(_NestedDescriptorBase): - - """Descriptor for a service. - - name: (str) Name of the service. - full_name: (str) Full name of the service, including package name. - index: (int) 0-indexed index giving the order that this services - definition appears withing the .proto file. - methods: (list of MethodDescriptor) List of methods provided by this - service. - methods_by_name: (dict str -> MethodDescriptor) Same MethodDescriptor - objects as in |methods_by_name|, but indexed by "name" attribute in each - MethodDescriptor. - options: (descriptor_pb2.ServiceOptions) Service options message or - None to use default service options. - file: (FileDescriptor) Reference to file info. 
- """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.ServiceDescriptor - - def __new__(cls, name, full_name, index, methods, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None): - _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access - return _message.default_pool.FindServiceByName(full_name) - - def __init__(self, name, full_name, index, methods, options=None, - serialized_options=None, file=None, # pylint: disable=redefined-builtin - serialized_start=None, serialized_end=None): - super(ServiceDescriptor, self).__init__( - options, 'ServiceOptions', name, full_name, file, - None, serialized_start=serialized_start, - serialized_end=serialized_end, serialized_options=serialized_options) - self.index = index - self.methods = methods - self.methods_by_name = dict((m.name, m) for m in methods) - # Set the containing service for each method in this service. - for method in self.methods: - method.containing_service = self - - def FindMethodByName(self, name): - """Searches for the specified method, and returns its descriptor.""" - return self.methods_by_name.get(name, None) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.ServiceDescriptorProto. - - Args: - proto: An empty descriptor_pb2.ServiceDescriptorProto. - """ - # This function is overridden to give a better doc comment. - super(ServiceDescriptor, self).CopyToProto(proto) - - -class MethodDescriptor(DescriptorBase): - - """Descriptor for a method in a service. - - name: (str) Name of the method within the service. - full_name: (str) Full name of method. - index: (int) 0-indexed index of the method inside the service. - containing_service: (ServiceDescriptor) The service that contains this - method. - input_type: The descriptor of the message that this method accepts. - output_type: The descriptor of the message that this method returns. 
- options: (descriptor_pb2.MethodOptions) Method options message or - None to use default method options. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.MethodDescriptor - - def __new__(cls, name, full_name, index, containing_service, - input_type, output_type, options=None, serialized_options=None): - _message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access - return _message.default_pool.FindMethodByName(full_name) - - def __init__(self, name, full_name, index, containing_service, - input_type, output_type, options=None, serialized_options=None): - """The arguments are as described in the description of MethodDescriptor - attributes above. - - Note that containing_service may be None, and may be set later if necessary. - """ - super(MethodDescriptor, self).__init__( - options, serialized_options, 'MethodOptions') - self.name = name - self.full_name = full_name - self.index = index - self.containing_service = containing_service - self.input_type = input_type - self.output_type = output_type - - -class FileDescriptor(DescriptorBase): - """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto. - - Note that enum_types_by_name, extensions_by_name, and dependencies - fields are only set by the message_factory module, and not by the - generated proto code. - - name: name of file, relative to root of source tree. - package: name of the package - syntax: string indicating syntax of the file (can be "proto2" or "proto3") - serialized_pb: (str) Byte string of serialized - descriptor_pb2.FileDescriptorProto. - dependencies: List of other FileDescriptors this FileDescriptor depends on. - public_dependencies: A list of FileDescriptors, subset of the dependencies - above, which were declared as "public". - message_types_by_name: Dict of message names and their descriptors. - enum_types_by_name: Dict of enum names and their descriptors. - extensions_by_name: Dict of extension names and their descriptors. 
- services_by_name: Dict of services names and their descriptors. - pool: the DescriptorPool this descriptor belongs to. When not passed to the - constructor, the global default pool is used. - """ - - if _USE_C_DESCRIPTORS: - _C_DESCRIPTOR_CLASS = _message.FileDescriptor - - def __new__(cls, name, package, options=None, - serialized_options=None, serialized_pb=None, - dependencies=None, public_dependencies=None, - syntax=None, pool=None): - # FileDescriptor() is called from various places, not only from generated - # files, to register dynamic proto files and messages. - if serialized_pb: - # TODO(amauryfa): use the pool passed as argument. This will work only - # for C++-implemented DescriptorPools. - return _message.default_pool.AddSerializedFile(serialized_pb) - else: - return super(FileDescriptor, cls).__new__(cls) - - def __init__(self, name, package, options=None, - serialized_options=None, serialized_pb=None, - dependencies=None, public_dependencies=None, - syntax=None, pool=None): - """Constructor.""" - super(FileDescriptor, self).__init__( - options, serialized_options, 'FileOptions') - - if pool is None: - from google.protobuf import descriptor_pool - pool = descriptor_pool.Default() - self.pool = pool - self.message_types_by_name = {} - self.name = name - self.package = package - self.syntax = syntax or "proto2" - self.serialized_pb = serialized_pb - - self.enum_types_by_name = {} - self.extensions_by_name = {} - self.services_by_name = {} - self.dependencies = (dependencies or []) - self.public_dependencies = (public_dependencies or []) - - if (api_implementation.Type() == 'cpp' and - self.serialized_pb is not None): - _message.default_pool.AddSerializedFile(self.serialized_pb) - - def CopyToProto(self, proto): - """Copies this to a descriptor_pb2.FileDescriptorProto. - - Args: - proto: An empty descriptor_pb2.FileDescriptorProto. - """ - proto.ParseFromString(self.serialized_pb) - - -def _ParseOptions(message, string): - """Parses serialized options. 
- - This helper function is used to parse serialized options in generated - proto2 files. It must not be used outside proto2. - """ - message.ParseFromString(string) - return message - - -def _ToCamelCase(name): - """Converts name to camel-case and returns it.""" - capitalize_next = False - result = [] - - for c in name: - if c == '_': - if result: - capitalize_next = True - elif capitalize_next: - result.append(c.upper()) - capitalize_next = False - else: - result += c - - # Lower-case the first letter. - if result and result[0].isupper(): - result[0] = result[0].lower() - return ''.join(result) - - -def _OptionsOrNone(descriptor_proto): - """Returns the value of the field `options`, or None if it is not set.""" - if descriptor_proto.HasField('options'): - return descriptor_proto.options - else: - return None - - -def _ToJsonName(name): - """Converts name to Json name and returns it.""" - capitalize_next = False - result = [] - - for c in name: - if c == '_': - capitalize_next = True - elif capitalize_next: - result.append(c.upper()) - capitalize_next = False - else: - result += c - - return ''.join(result) - - -def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True, - syntax=None): - """Make a protobuf Descriptor given a DescriptorProto protobuf. - - Handles nested descriptors. Note that this is limited to the scope of defining - a message inside of another message. Composite fields can currently only be - resolved if the message is defined in the same scope as the field. - - Args: - desc_proto: The descriptor_pb2.DescriptorProto protobuf message. - package: Optional package name for the new message Descriptor (string). - build_file_if_cpp: Update the C++ descriptor pool if api matches. - Set to False on recursion, so no duplicates are created. - syntax: The syntax/semantics that should be used. Set to "proto3" to get - proto3 field presence semantics. - Returns: - A Descriptor for protobuf messages. 
- """ - if api_implementation.Type() == 'cpp' and build_file_if_cpp: - # The C++ implementation requires all descriptors to be backed by the same - # definition in the C++ descriptor pool. To do this, we build a - # FileDescriptorProto with the same definition as this descriptor and build - # it into the pool. - from google.protobuf import descriptor_pb2 - file_descriptor_proto = descriptor_pb2.FileDescriptorProto() - file_descriptor_proto.message_type.add().MergeFrom(desc_proto) - - # Generate a random name for this proto file to prevent conflicts with any - # imported ones. We need to specify a file name so the descriptor pool - # accepts our FileDescriptorProto, but it is not important what that file - # name is actually set to. - proto_name = binascii.hexlify(os.urandom(16)).decode('ascii') - - if package: - file_descriptor_proto.name = os.path.join(package.replace('.', '/'), - proto_name + '.proto') - file_descriptor_proto.package = package - else: - file_descriptor_proto.name = proto_name + '.proto' - - _message.default_pool.Add(file_descriptor_proto) - result = _message.default_pool.FindFileByName(file_descriptor_proto.name) - - if _USE_C_DESCRIPTORS: - return result.message_types_by_name[desc_proto.name] - - full_message_name = [desc_proto.name] - if package: full_message_name.insert(0, package) - - # Create Descriptors for enum types - enum_types = {} - for enum_proto in desc_proto.enum_type: - full_name = '.'.join(full_message_name + [enum_proto.name]) - enum_desc = EnumDescriptor( - enum_proto.name, full_name, None, [ - EnumValueDescriptor(enum_val.name, ii, enum_val.number) - for ii, enum_val in enumerate(enum_proto.value)]) - enum_types[full_name] = enum_desc - - # Create Descriptors for nested types - nested_types = {} - for nested_proto in desc_proto.nested_type: - full_name = '.'.join(full_message_name + [nested_proto.name]) - # Nested types are just those defined inside of the message, not all types - # used by fields in the message, so no loops 
are possible here. - nested_desc = MakeDescriptor(nested_proto, - package='.'.join(full_message_name), - build_file_if_cpp=False, - syntax=syntax) - nested_types[full_name] = nested_desc - - fields = [] - for field_proto in desc_proto.field: - full_name = '.'.join(full_message_name + [field_proto.name]) - enum_desc = None - nested_desc = None - if field_proto.json_name: - json_name = field_proto.json_name - else: - json_name = None - if field_proto.HasField('type_name'): - type_name = field_proto.type_name - full_type_name = '.'.join(full_message_name + - [type_name[type_name.rfind('.')+1:]]) - if full_type_name in nested_types: - nested_desc = nested_types[full_type_name] - elif full_type_name in enum_types: - enum_desc = enum_types[full_type_name] - # Else type_name references a non-local type, which isn't implemented - field = FieldDescriptor( - field_proto.name, full_name, field_proto.number - 1, - field_proto.number, field_proto.type, - FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type), - field_proto.label, None, nested_desc, enum_desc, None, False, None, - options=_OptionsOrNone(field_proto), has_default_value=False, - json_name=json_name) - fields.append(field) - - desc_name = '.'.join(full_message_name) - return Descriptor(desc_proto.name, desc_name, None, None, fields, - list(nested_types.values()), list(enum_types.values()), [], - options=_OptionsOrNone(desc_proto)) From f19d438c4f95079a5651cf03e024bacce29d04bf Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Tue, 15 Sep 2020 12:41:02 -0600 Subject: [PATCH 02/11] implement stateless function support... 
--- .gitignore | 12 ++++++++++++ .travis.yml | 1 + 2 files changed, 13 insertions(+) diff --git a/.gitignore b/.gitignore index bb2c636..9157b54 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,15 @@ /docs/project/project/ /docs/project/target/ /docs/target/ +venv +/.venv/ +/dist/ +*_pb2.py +*_pb2_grpc.py +/build +**/*.egg* +*.pyc +*.iml +protobuf/frontend +protobuf/protocol +protobuf/example/shoppingcart \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index fe6696a..db01170 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,7 @@ jobs: - stage: build install: - pip install -r requirements.txt + - pip install . -vvv script: pytest deploy: provider: pypi From f284b8dab9b1af48cb1e312c52e682fe4a7cc4a6 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Tue, 15 Sep 2020 13:01:29 -0600 Subject: [PATCH 03/11] tck passing --- .dockerignore | 2 + .flake8 | 12 + Description.md | 235 +++++++++++ Dockerfile | 20 + LICENSE | 201 ++++++++++ README.md | 33 ++ cloudstate/__init__.py | 10 + cloudstate/cloudstate.py | 100 +++++ cloudstate/contexts.py | 60 +++ cloudstate/discovery_servicer.py | 113 ++++++ cloudstate/event_sourced_context.py | 46 +++ cloudstate/event_sourced_entity.py | 202 ++++++++++ cloudstate/eventsourced_servicer.py | 144 +++++++ cloudstate/function_servicer.py | 164 ++++++++ cloudstate/stateless_function_context.py | 23 ++ cloudstate/stateless_function_entity.py | 188 +++++++++ cloudstate/test/__init__.py | 0 cloudstate/test/functiondemo/__init__.py | 0 .../test/functiondemo/function_definition.py | 87 ++++ .../test/functiondemo/test_functiondemo.py | 87 ++++ cloudstate/test/run_test_server.py | 32 ++ cloudstate/test/shoppingcart/Dockerfile | 9 + cloudstate/test/shoppingcart/__init__.py | 0 .../test/shoppingcart/persistence/__init__.py | 0 cloudstate/test/shoppingcart/requirements.txt | 6 + cloudstate/test/shoppingcart/shopping_cart.py | 15 + .../test/shoppingcart/shopping_cart_entity.py | 130 ++++++ 
.../test/shoppingcart/test_shoppingcart.py | 36 ++ cloudstate/test/tck_services.py | 35 ++ cloudstate/utils/__init__.py | 0 cloudstate/utils/payload_utils.py | 27 ++ cloudstate/version.py | 6 + docs/README.md | 23 ++ docs/build.sbt | 10 + docs/project/build.properties | 1 + docs/project/plugins.sbt | 2 + docs/src/main/paradox/gettingstarted.md | 11 + docs/src/main/paradox/index.md | 7 + extended_tck.sh | 66 +++ protobuf/lib/cloudstate/crdt.proto | 379 ++++++++++++++++++ protobuf/lib/cloudstate/entity.proto | 191 +++++++++ protobuf/lib/cloudstate/event_sourced.proto | 115 ++++++ protobuf/lib/cloudstate/function.proto | 61 +++ protobuf/lib/google/api/annotations.proto | 32 ++ protobuf/lib/google/api/http.proto | 377 +++++++++++++++++ protobuf/lib/google/api/httpbody.proto | 78 ++++ .../grpc/reflection/v1alpha/reflection.proto | 136 +++++++ protobuf/proto/cloudstate/entity_key.proto | 30 ++ protobuf/proto/cloudstate/eventing.proto | 35 ++ .../test/functiondemo/functiondemo.proto | 36 ++ .../test/functiondemo/functiondemo2.proto | 22 + .../shoppingcart/persistence/domain.proto | 27 ++ .../test/shoppingcart/shoppingcart.proto | 63 +++ requirements.txt | 9 + scripts/compile-protobuf.sh | 28 ++ scripts/fetch-cloudstate-pb.sh | 35 ++ setup.cfg | 33 ++ setup.py | 72 ++++ 58 files changed, 3902 insertions(+) create mode 100644 .dockerignore create mode 100644 .flake8 create mode 100644 Description.md create mode 100644 Dockerfile create mode 100644 LICENSE create mode 100644 README.md create mode 100644 cloudstate/__init__.py create mode 100644 cloudstate/cloudstate.py create mode 100644 cloudstate/contexts.py create mode 100755 cloudstate/discovery_servicer.py create mode 100644 cloudstate/event_sourced_context.py create mode 100644 cloudstate/event_sourced_entity.py create mode 100644 cloudstate/eventsourced_servicer.py create mode 100644 cloudstate/function_servicer.py create mode 100644 cloudstate/stateless_function_context.py create mode 100644 
cloudstate/stateless_function_entity.py create mode 100644 cloudstate/test/__init__.py create mode 100644 cloudstate/test/functiondemo/__init__.py create mode 100644 cloudstate/test/functiondemo/function_definition.py create mode 100644 cloudstate/test/functiondemo/test_functiondemo.py create mode 100644 cloudstate/test/run_test_server.py create mode 100644 cloudstate/test/shoppingcart/Dockerfile create mode 100644 cloudstate/test/shoppingcart/__init__.py create mode 100644 cloudstate/test/shoppingcart/persistence/__init__.py create mode 100644 cloudstate/test/shoppingcart/requirements.txt create mode 100644 cloudstate/test/shoppingcart/shopping_cart.py create mode 100644 cloudstate/test/shoppingcart/shopping_cart_entity.py create mode 100644 cloudstate/test/shoppingcart/test_shoppingcart.py create mode 100644 cloudstate/test/tck_services.py create mode 100644 cloudstate/utils/__init__.py create mode 100644 cloudstate/utils/payload_utils.py create mode 100644 cloudstate/version.py create mode 100644 docs/README.md create mode 100644 docs/build.sbt create mode 100644 docs/project/build.properties create mode 100644 docs/project/plugins.sbt create mode 100644 docs/src/main/paradox/gettingstarted.md create mode 100644 docs/src/main/paradox/index.md create mode 100755 extended_tck.sh create mode 100644 protobuf/lib/cloudstate/crdt.proto create mode 100644 protobuf/lib/cloudstate/entity.proto create mode 100644 protobuf/lib/cloudstate/event_sourced.proto create mode 100644 protobuf/lib/cloudstate/function.proto create mode 100644 protobuf/lib/google/api/annotations.proto create mode 100644 protobuf/lib/google/api/http.proto create mode 100644 protobuf/lib/google/api/httpbody.proto create mode 100644 protobuf/lib/grpc/reflection/v1alpha/reflection.proto create mode 100644 protobuf/proto/cloudstate/entity_key.proto create mode 100644 protobuf/proto/cloudstate/eventing.proto create mode 100644 protobuf/proto/cloudstate/test/functiondemo/functiondemo.proto create mode 
100644 protobuf/proto/cloudstate/test/functiondemo/functiondemo2.proto create mode 100644 protobuf/proto/cloudstate/test/shoppingcart/persistence/domain.proto create mode 100644 protobuf/proto/cloudstate/test/shoppingcart/shoppingcart.proto create mode 100644 requirements.txt create mode 100755 scripts/compile-protobuf.sh create mode 100755 scripts/fetch-cloudstate-pb.sh create mode 100644 setup.cfg create mode 100644 setup.py diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..86d1dea --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +Dockerfile +**/*_pb2_* \ No newline at end of file diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..0e0e08b --- /dev/null +++ b/.flake8 @@ -0,0 +1,12 @@ +[flake8] +# Recommend matching the black line length (default 88), +# rather than using the flake8 default of 79: +max-line-length = 88 +extend-ignore = + # See https://github.com/PyCQA/pycodestyle/issues/373 + E203, +exclude = + **/*_pb2.py + **/*_pb2_grpc.py + venv + build \ No newline at end of file diff --git a/Description.md b/Description.md new file mode 100644 index 0000000..1a6357d --- /dev/null +++ b/Description.md @@ -0,0 +1,235 @@ + +Cloudstate is a specification, protocol, and reference implementation for providing distributed state management patterns suitable for **Serverless** computing. +The current supported and envisioned patterns include: + +* **Event Sourcing** +* **Conflict-Free Replicated Data Types (CRDTs)** +* **Key-Value storage** +* **P2P messaging** +* **CQRS read side projections** + +Cloudstate is polyglot, which means that services can be written in any language that supports gRPC, +and with language specific libraries provided that allow idiomatic use of the patterns in each language. +Cloudstate can be used either by itself, in combination with a Service Mesh, +or it is envisioned that it will be integrated with other Serverless technologies such as [Knative](https://knative.dev/). 
+ +Read more about the design, architecture, techniques, and technologies behind Cloudstate in [this section in the documentation](https://github.com/cloudstateio/cloudstate/blob/master/README.md#enter-cloudstate). + +The Cloudstate Python user language support is a library that implements the Cloudstate protocol and offers an pythonistic API +for writing entities that implement the types supported by the Cloudstate protocol. + +The Cloudstate documentation can be found [here](https://cloudstate.io/docs/) + +## Install and update using pip: + +``` +pip install -U cloudstate +``` + +## A Simple EventSourced Example: + +### 1. Define your gRPC contract + +``` +// This is the public API offered by the shopping cart entity. +syntax = "proto3"; + +import "google/protobuf/empty.proto"; +import "cloudstate/entity_key.proto"; +import "google/api/annotations.proto"; +import "google/api/http.proto"; + +package com.example.shoppingcart; + +message AddLineItem { + string user_id = 1 [(.cloudstate.entity_key) = true]; + string product_id = 2; + string name = 3; + int32 quantity = 4; +} + +message RemoveLineItem { + string user_id = 1 [(.cloudstate.entity_key) = true]; + string product_id = 2; +} + +message GetShoppingCart { + string user_id = 1 [(.cloudstate.entity_key) = true]; +} + +message LineItem { + string product_id = 1; + string name = 2; + int32 quantity = 3; +} + +message Cart { + repeated LineItem items = 1; +} + +service ShoppingCart { + rpc AddItem(AddLineItem) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/cart/{user_id}/items/add", + body: "*", + }; + } + + rpc RemoveItem(RemoveLineItem) returns (google.protobuf.Empty) { + option (google.api.http).post = "/cart/{user_id}/items/{product_id}/remove"; + } + + rpc GetCart(GetShoppingCart) returns (Cart) { + option (google.api.http) = { + get: "/carts/{user_id}", + additional_bindings: { + get: "/carts/{user_id}/items", + response_body: "items" + } + }; + } +} + +``` + +### 2. 
Generate Python files + +It is necessary to compile your .proto files using the protoc compiler in order to generate Python files. +See [this official gRPC for Python quickstart](https://grpc.io/docs/languages/python/quickstart/) if you are not familiar with the gRPC protocol. + +Here is an example of how to compile the sample proto file: +``` +python -m grpc_tools.protoc -I../../protos --python_out=. --grpc_python_out=. ../../protos/shoppingcart.proto +``` + +### 3. Implement your business logic under an EventSourced Cloudstate Entity + +``` +from dataclasses import dataclass, field +from typing import MutableMapping + +from google.protobuf.empty_pb2 import Empty + +from cloudstate.event_sourced_context import EventSourcedCommandContext +from cloudstate.event_sourced_entity import EventSourcedEntity +from shoppingcart.domain_pb2 import (Cart as DomainCart, LineItem as DomainLineItem, ItemAdded, ItemRemoved) +from shoppingcart.shoppingcart_pb2 import (Cart, LineItem, AddLineItem, RemoveLineItem) +from shoppingcart.shoppingcart_pb2 import (_SHOPPINGCART, DESCRIPTOR as FILE_DESCRIPTOR) + + +@dataclass +class ShoppingCartState: + entity_id: str + cart: MutableMapping[str, LineItem] = field(default_factory=dict) + + +def init(entity_id: str) -> ShoppingCartState: + return ShoppingCartState(entity_id) + + +entity = EventSourcedEntity(_SHOPPINGCART, [FILE_DESCRIPTOR], init) + + +def to_domain_line_item(item): + domain_item = DomainLineItem() + domain_item.productId = item.product_id + domain_item.name = item.name + domain_item.quantity = item.quantity + return domain_item + + +@entity.snapshot() +def snapshot(state: ShoppingCartState): + cart = DomainCart() + cart.items = [to_domain_line_item(item) for item in state.cart.values()] + return cart + + +def to_line_item(domain_item): + item = LineItem() + item.product_id = domain_item.productId + item.name = domain_item.name + item.quantity = domain_item.quantity + return item + + +@entity.snapshot_handler() +def 
handle_snapshot(state: ShoppingCartState, domain_cart: DomainCart): + state.cart = {domain_item.productId: to_line_item(domain_item) for domain_item in domain_cart.items} + + +@entity.event_handler(ItemAdded) +def item_added(state: ShoppingCartState, event: ItemAdded): + cart = state.cart + if event.item.productId in cart: + item = cart[event.item.productId] + item.quantity = item.quantity + event.item.quantity + else: + item = to_line_item(event.item) + cart[item.product_id] = item + + +@entity.event_handler(ItemRemoved) +def item_removed(state: ShoppingCartState, event: ItemRemoved): + del state.cart[event.productId] + + +@entity.command_handler("GetCart") +def get_cart(state: ShoppingCartState): + cart = Cart() + cart.items.extend(state.cart.values()) + return cart + + +@entity.command_handler("AddItem") +def add_item(item: AddLineItem, ctx: EventSourcedCommandContext): + if item.quantity <= 0: + ctx.fail("Cannot add negative quantity of to item {}".format(item.productId)) + else: + item_added_event = ItemAdded() + item_added_event.item.CopyFrom(to_domain_line_item(item)) + ctx.emit(item_added_event) + return Empty() + + +@entity.command_handler("RemoveItem") +def remove_item(state: ShoppingCartState, item: RemoveLineItem, ctx: EventSourcedCommandContext): + cart = state.cart + if item.product_id not in cart: + ctx.fail("Cannot remove item {} because it is not in the cart.".format(item.productId)) + else: + item_removed_event = ItemRemoved() + item_removed_event.productId = item.product_id + ctx.emit(item_removed_event) + return Empty() +``` + +### 4. Register Entity + +``` +from cloudstate.cloudstate import CloudState +from shoppingcart.shopping_cart_entity import entity as shopping_cart_entity +import logging + +if __name__ == '__main__': + logging.basicConfig() + CloudState().register_event_sourced_entity(shopping_cart_entity).start() +``` + +### 5. 
Deployment + +Cloudstate runs on Docker and Kubernetes you need to package your application so that it works as a Docker container +and can deploy it together with Cloudstate Operator on Kubernetes, the details and examples of all of which can be found [here](https://code.visualstudio.com/docs/containers/quickstart-python), [here](https://github.com/cloudstateio/python-support/blob/master/shoppingcart/Dockerfile) and [here](https://cloudstate.io/docs/core/current/user/deployment/index.html). + +## Contributing + +For guidance on setting up a development environment and how to make a contribution to Cloudstate, +see the contributing [project page](https://github.com/cloudstateio/python-support) or consult an official documentation [here](https://cloudstate.io/docs/). + +## Links + +* [Website](https://cloudstate.io/) +* [Documentation](https://cloudstate.io/docs/) +* [Releases](https://pypi.org/project/cloudstate/) +* [Code](https://github.com/cloudstateio/python-support) +* [Issue tracker](https://github.com/cloudstateio/python-support/issues) diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..74abe9e --- /dev/null +++ b/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.8.0-slim + +WORKDIR /python-support + +RUN apt-get update && apt-get install -y curl +COPY ./requirements.txt /python-support/requirements.txt +RUN pip install -r /python-support/requirements.txt +COPY ./scripts /python-support/scripts +COPY ./protobuf /python-support/protobuf +COPY ./cloudstate /python-support/cloudstate +COPY ./setup.py /python-support/setup.py +COPY ./Description.md /python-support/Description.md + + +RUN pip install . 
-vvv + +WORKDIR / +ENTRYPOINT ["python", "-m", "cloudstate.test.tck_services"] + +EXPOSE 8080 \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md new file mode 100644 index 0000000..de6b459 --- /dev/null +++ b/README.md @@ -0,0 +1,33 @@ +# Python User Language Support +Python User Language Support for [Cloudstate](https://github.com/cloudstateio/cloudstate). + +## Installation via source + +``` +> git clone https://github.com/cloudstateio/python-support.git +Cloning into 'python-support'... + +> cd python-support +> python3 -m venv ./venv +> source ./venv/bin/activate +> python --version +Python 3.7.3 +> pip --version +> pip install wheel +> pip install . +``` + +### generate installer +``` +python setup.py bdist_wheel +``` + +### local install +``` +python -m pip install dist/cloudstate--py3-none-any.whl +``` + +### build and run tck, including provisional tests for stateless functions. 
@dataclass
class CloudState:
    """Entry point of the Cloudstate Python user-language support.

    Fluent builder that collects registered entities and, on start(), serves
    the Cloudstate protocol (discovery, event sourced, stateless function)
    over a single gRPC server.
    """

    # NOTE(review): this runs at import time in the class body and forces
    # DEBUG on the root logger for the whole process -- confirm intended
    # rather than a development leftover.
    logging.basicConfig(
        format="%(asctime)s - %(filename)s - %(levelname)s: %(message)s",
        level=logging.DEBUG,
    )
    logging.root.setLevel(logging.DEBUG)

    # Computed in start() as "host:port"; empty until then.
    __address: str = ""
    # The un-annotated names below are class attributes (not dataclass
    # fields); the fluent setters shadow them with instance attributes.
    __host = "127.0.0.1"
    __port = "8080"
    __workers = multiprocessing.cpu_count()
    __event_sourced_entities: List[EventSourcedEntity] = field(default_factory=list)
    __stateless_function_entities: List[StatelessFunction] = field(default_factory=list)

    def host(self, address: str):
        """Set the address of the network host.

        Default address is 127.0.0.1.  Returns self for chaining.
        """
        self.__host = address
        return self

    def port(self, port: str):
        """Set the network port.

        Default port is 8080.  Returns self for chaining.
        """
        self.__port = port
        return self

    def max_workers(self, workers: Optional[int] = multiprocessing.cpu_count()):
        """Set the gRPC server's number of worker threads.

        Defaults to the number of CPU cores on the machine (the default is
        evaluated once, at import time).  Returns self for chaining.
        """
        self.__workers = workers
        return self

    def register_event_sourced_entity(self, entity: EventSourcedEntity):
        """Register the user EventSourced entity.  Returns self for chaining."""
        self.__event_sourced_entities.append(entity)
        return self

    def register_stateless_function_entity(self, entity: StatelessFunction):
        """Register the user Stateless Function entity.  Returns self for chaining."""
        self.__stateless_function_entities.append(entity)
        return self

    def start(self):
        """Start the user function and gRPC server; returns the server."""

        # HOST/PORT environment variables take precedence over the
        # programmatically configured values.
        self.__address = "{}:{}".format(
            os.environ.get("HOST", self.__host), os.environ.get("PORT", self.__port)
        )

        server = grpc.server(futures.ThreadPoolExecutor(max_workers=self.__workers))

        # Discovery must be told about every entity (event sourced and
        # stateless) so the proxy can fetch their service descriptors.
        add_EntityDiscoveryServicer_to_server(
            CloudStateEntityDiscoveryServicer(
                self.__event_sourced_entities, self.__stateless_function_entities
            ),
            server,
        )
        add_EventSourcedServicer_to_server(
            CloudStateEventSourcedServicer(self.__event_sourced_entities), server
        )
        add_StatelessFunctionServicer_to_server(
            CloudStateStatelessFunctionServicer(self.__stateless_function_entities),
            server,
        )
        logging.info("Starting Cloudstate on address %s", self.__address)
        try:
            server.add_insecure_port(self.__address)
            server.start()
        except IOError as e:
            # Failure is logged but not re-raised; the caller still receives
            # the (unstarted) server object.
            logging.error("Error on start Cloudstate %s", e.__cause__)

        return server
+Licensed under the Apache License, Version 2.0. +""" + +from typing import List + +from cloudstate.entity_pb2 import ClientAction, Failure, Forward, Reply, SideEffect + + +class Context: + """Root class of all contexts.""" + + pass + + +class ClientActionContext(Context): + """Context that provides client actions, which include failing and forwarding. + These contexts are typically made available in response to commands.""" + + def __init__(self, command_id: int): + self.command_id: int = command_id + self.errors: List[str] = [] + self.effects: List[SideEffect] = [] + self.forward: Forward = None + + def fail(self, error_message: str): + """Fail the command with the given message""" + self.errors.append(error_message) + + def has_errors(self): + return len(self.errors) > 0 + + def create_client_action(self, result, allow_reply): + client_action = ClientAction() + if self.has_errors(): + failure = Failure() + failure.command_id = self.command_id + failure.description = str(self.errors) + client_action.failure.CopyFrom(failure) + + return client_action + + elif result: + if self.forward: + raise Exception( + "Both a reply was returned, and a forward message was sent, " + "choose one or the other." + ) + else: + reply = Reply() + reply.payload.Pack(result) + client_action.reply.CopyFrom(reply) + elif self.forward: + client_action.forward.CopyFrom(self.forward) + elif allow_reply: + return None + else: + raise Exception("No reply or forward returned by command handler!") + return client_action diff --git a/cloudstate/discovery_servicer.py b/cloudstate/discovery_servicer.py new file mode 100755 index 0000000..6191239 --- /dev/null +++ b/cloudstate/discovery_servicer.py @@ -0,0 +1,113 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +import platform +from dataclasses import dataclass +from logging import getLogger +from pprint import pprint +from typing import List + +from google.protobuf.descriptor_pb2 import FileDescriptorProto, FileDescriptorSet +from google.protobuf.descriptor_pool import Default +from google.protobuf.empty_pb2 import Empty + +from cloudstate import entity_pb2 +from cloudstate.entity_pb2_grpc import EntityDiscoveryServicer +from cloudstate.event_sourced_entity import EventSourcedEntity +from cloudstate.stateless_function_entity import StatelessFunction + +logger = getLogger() + + +@dataclass +class CloudStateEntityDiscoveryServicer(EntityDiscoveryServicer): + event_sourced_entities: List[EventSourcedEntity] + stateless_function_entities: List[StatelessFunction] + + def discover(self, request, context): + logger.info("discovering.") + pprint(request) + descriptor_set = FileDescriptorSet() + for entity in self.event_sourced_entities + self.stateless_function_entities: + logger.info(f"entity: {entity.name()}") + for descriptor in entity.file_descriptors: + logger.info(f"discovering {descriptor.name}") + logger.info(f"SD: {entity.service_descriptor.full_name}") + from_string = FileDescriptorProto.FromString(descriptor.serialized_pb) + descriptor_set.file.append(from_string) + + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default().FindFileByName("google/protobuf/empty.proto").serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default().FindFileByName("cloudstate/entity_key.proto").serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default().FindFileByName("cloudstate/eventing.proto").serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default() + .FindFileByName("google/protobuf/descriptor.proto") + .serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + 
Default().FindFileByName("google/api/annotations.proto").serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default().FindFileByName("google/api/http.proto").serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default().FindFileByName("google/api/httpbody.proto").serialized_pb + ) + ) + descriptor_set.file.append( + FileDescriptorProto.FromString( + Default().FindFileByName("google/protobuf/any.proto").serialized_pb + ) + ) + spec = entity_pb2.EntitySpec( + service_info=entity_pb2.ServiceInfo( + service_name="", + service_version="0.1.0", + service_runtime="Python " + + platform.python_version() + + " [" + + platform.python_implementation() + + " " + + platform.python_compiler() + + "]", + support_library_name="cloudstate-python-support", + support_library_version="0.1.0", + ), + entities=[ + entity_pb2.Entity( + entity_type=entity.entity_type(), + service_name=entity.service_descriptor.full_name, + persistence_id=entity.persistence_id, + ) + for entity in self.event_sourced_entities + + self.stateless_function_entities + ], + proto=descriptor_set.SerializeToString(), + ) + return spec + + def reportError(self, request, context): + logger.error(f"Report error: {request}") + pprint(request) + return Empty() diff --git a/cloudstate/event_sourced_context.py b/cloudstate/event_sourced_context.py new file mode 100644 index 0000000..7b229f8 --- /dev/null +++ b/cloudstate/event_sourced_context.py @@ -0,0 +1,46 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" + +from dataclasses import dataclass, field +from typing import Any, List + +from cloudstate.contexts import ClientActionContext +from cloudstate.entity_pb2 import Forward, SideEffect + + +@dataclass +class EventSourcedCommandContext(ClientActionContext): + """An event sourced command context. + Command Handler Methods may take this is a parameter. 
It allows emitting + new events in response to a command, along with forwarding the result to other + entities, and performing side effects on other entities""" + + command_name: str + command_id: int + entity_id: str + sequence: int + events: List[Any] = field(default_factory=list) + errors: List[str] = field(default_factory=list) + effects: List[SideEffect] = field(default_factory=list) + forward: Forward = None + + def emit(self, event): + """ + Emit the given event. The event will be persisted, and the handler of the + event defined in the current behavior will immediately be executed to pick it up + """ + self.events.append(event) + + +@dataclass +class SnapshotContext: + entity_id: str + sequence_number: int + + +@dataclass +class EventContext: + entity_id: str + sequence_number: int diff --git a/cloudstate/event_sourced_entity.py b/cloudstate/event_sourced_entity.py new file mode 100644 index 0000000..5e826e1 --- /dev/null +++ b/cloudstate/event_sourced_entity.py @@ -0,0 +1,202 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +import inspect +from dataclasses import dataclass, field +from typing import Any, Callable, List, MutableMapping + +from google.protobuf import descriptor as _descriptor + +from cloudstate.event_sourced_context import ( + EventContext, + EventSourcedCommandContext, + SnapshotContext, +) +from cloudstate.event_sourced_pb2 import _EVENTSOURCED + + +@dataclass +class EventSourcedEntity: + service_descriptor: _descriptor.ServiceDescriptor + file_descriptors: List[_descriptor.FileDescriptor] + init_state: Callable[[str], Any] + persistence_id: str = None + snapshot_every: int = 0 + snapshot_function: Callable[[Any], Any] = None + snapshot_handler_function: Callable[[Any, Any], Any] = None + command_handlers: MutableMapping[str, Callable] = field(default_factory=dict) + event_handlers: MutableMapping[type, Callable] = field(default_factory=dict) + + def __post_init__(self): + if not self.persistence_id: + self.persistence_id = self.service_descriptor.full_name + + def entity_type(self): + return _EVENTSOURCED.full_name + + def snapshot(self): + def register_snapshot(function: Callable[[Any], Any]): + """ + Register the function to snapshot the state + """ + if self.snapshot_function: + raise Exception( + "Snapshot function {} already defined for this entity".format( + self.snapshot_function + ) + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most 2 parameters, the current state and the snapshot context, " + "should be accepted by the snapshot function" + ) + self.snapshot_function = function + return function + + return register_snapshot + + def snapshot_handler(self): + def register_snapshot_handler(function): + """ + Register the function to handle snapshots + """ + if self.snapshot_handler_function: + raise Exception( + f"Snapshot handler function {self.snapshot_handler_function} " + "already defined for this entity" + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most two parameters, the current state and the 
snapshot, " + "should be accepted by the snapshot_handler function" + ) + self.snapshot_handler_function = function + return function + + return register_snapshot_handler + + def command_handler(self, name: str): + def register_command_handler(function): + """ + Register the function to handle commands + """ + if name in self.command_handlers: + raise Exception( + "Command handler function {} already defined for command {}".format( + self.command_handlers[name], name + ) + ) + if function.__code__.co_argcount > 3: + raise Exception( + "At most three parameters, the current state, the command and the " + "context, should be accepted by the command_handler function" + ) + self.command_handlers[name] = function + return function + + return register_command_handler + + def event_handler(self, event_type: type): + def register_event_handler(function): + """ + Register the function to handle events + """ + if event_type in self.event_handlers: + raise Exception( + "Event handler function {} already defined for type {}".format( + self.event_handlers[event_type], event_type + ) + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most two parameters, the current state and the event, should " + "be accepted by the command_handler function" + ) + self.event_handlers[event_type] = function + return function + + return register_event_handler + + def name(self): + return self.service_descriptor.full_name + + +def invoke(function, parameters): + ordered_parameters = [] + for parameter_definition in inspect.signature(function).parameters.values(): + annotation = parameter_definition.annotation + if annotation == inspect._empty: + raise Exception( + f"Cannot inject parameter {parameter_definition.name} of function " + f"{function}: Missing type annotation" + ) + match_found = False + for param in parameters: + if isinstance(param, annotation): + match_found = True + ordered_parameters.append(param) + if not match_found: + raise Exception( + "Cannot inject parameter 
{} of function {}: No matching value".format( + parameter_definition.name, function + ) + ) + return function(*ordered_parameters) + + +@dataclass +class EventSourcedHandler: + entity: EventSourcedEntity + + def init_state(self, entity_id: str): + return self.entity.init_state(entity_id) + + def snapshot(self, current_state, snapshot_context: SnapshotContext): + if not self.entity.snapshot_function: + raise Exception( + "Missing snapshot function for entity {}".format(self.entity.name()) + ) + return invoke(self.entity.snapshot_function, [current_state, snapshot_context]) + + def handle_snapshot( + self, current_state, snapshot, snapshot_context: SnapshotContext + ): + if not self.entity.snapshot_handler_function: + raise Exception( + "Missing snapshot handler function for entity {}".format( + self.entity.name() + ) + ) + return invoke( + self.entity.snapshot_handler_function, + [current_state, snapshot, snapshot_context], + ) + + def handle_event(self, current_state, event, event_context: EventContext): + event_type = type(event) + handler_function = None + if event_type in self.entity.event_handlers: + handler_function = self.entity.event_handlers[event_type] + else: + for event_type, function in self.entity.event_handlers: + if isinstance(event, event_type): + handler_function = function + if not handler_function: + raise Exception( + f"Missing event handler function for entity {self.entity.name()} and " + f"event type {event_type}" + ) + return invoke(handler_function, [current_state, event, event_context]) + + def handle_command(self, current_state, command, ctx: EventSourcedCommandContext): + if ctx.command_name not in self.entity.command_handlers: + raise Exception( + f"Missing command handler function for entity {self.entity.name()} and " + f"command {ctx.command_name}" + ) + return invoke( + self.entity.command_handlers[ctx.command_name], + [current_state, command, ctx], + ) diff --git a/cloudstate/eventsourced_servicer.py 
class CloudStateEventSourcedServicer(EventSourcedServicer):
    """gRPC servicer implementing the event sourced protocol stream.

    Each handle() stream serves one entity instance: the first message must
    be an init (optionally carrying a snapshot), after which events replay
    prior state and commands produce replies.
    """

    def __init__(self, event_sourced_entities: List[EventSourcedEntity]):
        # Index registered entities by their fully qualified service name.
        self.event_sourced_entities = {
            entity.name(): entity for entity in event_sourced_entities
        }

    def handle(self, request_iterator, context):
        """Drive one entity's stream: init, replayed events, then commands."""
        initiated = False
        current_state = None
        # Effectively Optional until the init message arrives.
        handler: EventSourcedHandler = None
        entity_id: str = None
        start_sequence_number: int = 0
        for request in request_iterator:
            if not initiated:
                # The very first message must be init; anything else is a
                # protocol violation.
                if request.HasField("init"):
                    init: EventSourcedInit = request.init
                    service_name = init.service_name
                    entity_id = init.entity_id
                    if service_name not in self.event_sourced_entities:
                        raise Exception(
                            "No event sourced entity registered for service {}".format(
                                service_name
                            )
                        )
                    entity = self.event_sourced_entities[service_name]
                    handler = EventSourcedHandler(entity)
                    current_state = handler.init_state(entity_id)
                    initiated = True
                    # A snapshot, when present, fast-forwards the state and
                    # the starting sequence number.
                    if init.HasField("snapshot"):
                        event_sourced_snapshot: EventSourcedSnapshot = init.snapshot
                        start_sequence_number = event_sourced_snapshot.snapshot_sequence
                        snapshot = get_payload(event_sourced_snapshot.snapshot)
                        snapshot_context = SnapshotContext(
                            entity_id, start_sequence_number
                        )
                        snapshot_result = handler.handle_snapshot(
                            current_state, snapshot, snapshot_context
                        )
                        # A None result means "state unchanged".
                        if snapshot_result:
                            current_state = snapshot_result
                else:
                    raise Exception(
                        "Cannot handle {} before initialization".format(request)
                    )

            elif request.HasField("event"):
                # Journal replay: apply the event and advance the sequence.
                event: EventSourcedEvent = request.event
                evt = get_payload(event)
                event_result = handler.handle_event(
                    current_state, evt, EventContext(entity_id, event.sequence)
                )
                start_sequence_number = event.sequence
                if event_result:
                    current_state = event_result
                # NOTE(review): debug print to stdout; consider logging instead.
                pprint("Handling event {}".format(event))
            elif request.HasField("command"):
                command: Command = request.command
                cmd = get_payload(command)
                ctx = EventSourcedCommandContext(
                    command.name, command.id, entity_id, start_sequence_number
                )
                result = None
                try:
                    result = handler.handle_command(current_state, cmd, ctx)
                except Exception as ex:
                    # Handler failures become a client-visible failure action.
                    ctx.fail(str(ex))
                    logging.exception("Failed to execute command:" + str(ex))

                client_action = ctx.create_client_action(result, False)
                event_sourced_reply = EventSourcedReply()
                event_sourced_reply.command_id = command.id
                event_sourced_reply.client_action.CopyFrom(client_action)
                snapshot = None
                perform_snapshot = False
                if not ctx.has_errors():
                    # Apply every event the command emitted, tracking whether
                    # any applied sequence number crosses a snapshot boundary.
                    for number, event in enumerate(ctx.events):
                        sequence_number = start_sequence_number + number + 1
                        # NOTE(review): the EventContext below is built with
                        # start_sequence_number + number while the snapshot
                        # check uses + number + 1 -- looks like an off-by-one;
                        # confirm against the protocol before changing.
                        event_result = handler.handle_event(
                            current_state,
                            event,
                            EventContext(entity_id, start_sequence_number + number),
                        )
                        if event_result:
                            current_state = event_result
                        snapshot_every = handler.entity.snapshot_every
                        # snapshot_every <= 0 disables snapshotting entirely.
                        perform_snapshot = (snapshot_every > 0) and (
                            perform_snapshot or (sequence_number % snapshot_every == 0)
                        )
                    end_sequence_number = start_sequence_number + len(ctx.events)
                    if perform_snapshot:
                        snapshot = handler.snapshot(
                            current_state,
                            SnapshotContext(entity_id, end_sequence_number),
                        )

                    event_sourced_reply.side_effects.extend(ctx.effects)
                    event_sourced_reply.events.extend(
                        [pack(event) for event in ctx.events]
                    )
                    if snapshot:
                        event_sourced_reply.snapshot.Pack(snapshot)

                output = EventSourcedStreamOut()
                output.reply.CopyFrom(event_sourced_reply)
                yield output

            else:
                raise Exception(
                    "Cannot handle {} after initialization".format(type(request))
                )
logging.info(f"handling unary {request} {context}.") + if request.service_name in self.stateless_function_entities: + service = self.stateless_function_entities[request.service_name] + handler = StatelessFunctionHandler(service) + ctx = StatelessFunctionContext(request.name) + result = None + try: + result = handler.handle_unary( + get_payload(request), ctx + ) # the proto the user defined function returned. + except Exception as ex: + ctx.fail(str(ex)) + logging.exception("Failed to execute command:" + str(ex)) + + client_action: ClientAction = ctx.create_client_action(result, False) + function_reply = FunctionReply() + + if not ctx.has_errors(): + function_reply.side_effects.extend(ctx.effects) + if client_action.HasField("reply"): + function_reply.reply.CopyFrom(client_action.reply) + elif client_action.HasField("forward"): + function_reply.forward.CopyFrom(client_action.forward) + else: + function_reply.failure.CopyFrom(client_action.failure) + return function_reply + + def handleStreamed(self, request_iterator: _RequestIterator, context): + peek = request_iterator.next() # evidently, the first message has no payload + # and is probably intended to prime the stream handler. + if peek.service_name in self.stateless_function_entities: + handler = StatelessFunctionHandler( + self.stateless_function_entities[peek.service_name] + ) + logging.debug(f"set stream handler to {peek.service_name}") + else: + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + reconstructed = (get_payload(x) for x in request_iterator) + ctx = StatelessFunctionContext(peek.name) + try: + result = handler.handle_stream( + reconstructed, ctx + ) # the proto the user defined function returned. 
+ for r in result: + client_action = ctx.create_client_action(r, False) + function_reply = FunctionReply() + if not ctx.has_errors(): + function_reply.side_effects.extend(ctx.effects) + if client_action.HasField("reply"): + function_reply.reply.CopyFrom(client_action.reply) + elif client_action.HasField("forward"): + function_reply.forward.CopyFrom(client_action.forward) + else: + function_reply.failure.CopyFrom(client_action.failure) + yield function_reply + + except Exception as ex: + ctx.fail(str(ex)) + logging.exception("Failed to execute command:" + str(ex)) + + def handleStreamedIn(self, request_iterator, context): + peek = request_iterator.next() # evidently, the first message has no payload + # and is probably intended to prime the stream handler. + logging.debug(f"peeked: {peek}") + if peek.service_name in self.stateless_function_entities: + handler = StatelessFunctionHandler( + self.stateless_function_entities[peek.service_name] + ) + logging.debug(f"set stream in handler to {peek.service_name}") + else: + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + reconstructed = (get_payload(x) for x in request_iterator) + ctx = StatelessFunctionContext(peek.name) + try: + result = handler.handle_stream_in( + reconstructed, ctx + ) # the proto the user defined function returned. 
+ client_action = ctx.create_client_action(result, False) + function_reply = FunctionReply() + if not ctx.has_errors(): + function_reply.side_effects.extend(ctx.effects) + if client_action.HasField("reply"): + function_reply.reply.CopyFrom(client_action.reply) + elif client_action.HasField("forward"): + function_reply.forward.CopyFrom(client_action.forward) + else: + function_reply.failure.CopyFrom(client_action.failure) + return function_reply + + except Exception as ex: + ctx.fail(str(ex)) + logging.exception("Failed to execute command:" + str(ex)) + + def handleStreamedOut(self, request, context): + if request.service_name in self.stateless_function_entities: + handler = StatelessFunctionHandler( + self.stateless_function_entities[request.service_name] + ) + else: + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + reconstructed = get_payload(request) + ctx = StatelessFunctionContext(request.name) + try: + for result in handler.handle_stream_out(reconstructed, ctx): + client_action = ctx.create_client_action(result, False) + function_reply = FunctionReply() + if not ctx.has_errors(): + function_reply.side_effects.extend(ctx.effects) + if client_action.HasField("reply"): + function_reply.reply.CopyFrom(client_action.reply) + elif client_action.HasField("forward"): + function_reply.forward.CopyFrom(client_action.forward) + else: + function_reply.failure.CopyFrom(client_action.failure) + yield function_reply + + except Exception as ex: + ctx.fail(str(ex)) + logging.exception("Failed to execute command:" + str(ex)) diff --git a/cloudstate/stateless_function_context.py b/cloudstate/stateless_function_context.py new file mode 100644 index 0000000..5dd54db --- /dev/null +++ b/cloudstate/stateless_function_context.py @@ -0,0 +1,23 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +import random +import sys +from dataclasses import dataclass, field +from typing import List + +from cloudstate.contexts import ClientActionContext +from cloudstate.entity_pb2 import Forward, SideEffect + + +@dataclass +class StatelessFunctionContext(ClientActionContext): + command_name: str + errors: List[str] = field(default_factory=list) + effects: List[SideEffect] = field(default_factory=list) + forward: Forward = None + + # todo: is this correct? there is no command_id on the stateless function requests. + command_id = random.randint(0, sys.maxsize) diff --git a/cloudstate/stateless_function_entity.py b/cloudstate/stateless_function_entity.py new file mode 100644 index 0000000..087b79a --- /dev/null +++ b/cloudstate/stateless_function_entity.py @@ -0,0 +1,188 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" + +import inspect +from dataclasses import dataclass, field +from typing import Callable, List, MutableMapping + +from google.protobuf import descriptor as _descriptor + +from cloudstate.function_pb2 import _STATELESSFUNCTION +from cloudstate.stateless_function_context import StatelessFunctionContext +import logging + + +@dataclass +class StatelessFunction: + service_descriptor: _descriptor.ServiceDescriptor + file_descriptors: List[_descriptor.FileDescriptor] + unary_handlers: MutableMapping[str, Callable] = field(default_factory=dict) + stream_handlers: MutableMapping[str, Callable] = field(default_factory=dict) + stream_in_handlers: MutableMapping[str, Callable] = field(default_factory=dict) + stream_out_handlers: MutableMapping[str, Callable] = field(default_factory=dict) + + @property + def persistence_id(self): + return self.name() + + def entity_type(self): + return _STATELESSFUNCTION.full_name + + def unary_handler(self, name: str): + def register_unary_handler(function): + """ + Register the function to handle commands + """ + if name in self.unary_handlers: + raise Exception( + "Command handler 
function {} already defined for command {}".format( + self.unary_handlers[name], name + ) + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most two parameters, the command and the context, should be " + "accepted by the command_handler function" + ) + self.unary_handlers[name] = function + return function + + return register_unary_handler + + def stream_handler(self, name: str): + def register_stream_handler(function): + """ + Register the function to handle commands + """ + if name in self.stream_handlers: + raise Exception( + "Command handler function {} already defined for command {}".format( + self.unary_handlers[name], name + ) + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most two parameters, the command and the context, should be " + "accepted by the command_handler function" + ) + self.stream_handlers[name] = function + return function + + return register_stream_handler + + def stream_in_handler(self, name: str): + def register_stream_in_handler(function): + """ + Register the function to handle commands + """ + if name in self.stream_in_handlers: + raise Exception( + "Command handler function {} already defined for command {}".format( + self.unary_handlers[name], name + ) + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most two parameters, the command and the context, should be " + "accepted by the command_handler function" + ) + self.stream_in_handlers[name] = function + return function + + return register_stream_in_handler + + def stream_out_handler(self, name: str): + def register_stream_out_handler(function): + """ + Register the function to handle commands + """ + if name in self.stream_out_handlers: + raise Exception( + "Command handler function {} already defined for command {}".format( + self.unary_handlers[name], name + ) + ) + if function.__code__.co_argcount > 2: + raise Exception( + "At most two parameters, the command and the context, should be " + "accepted by the command_handler 
function" + ) + self.stream_out_handlers[name] = function + return function + + return register_stream_out_handler + + def name(self): + return self.service_descriptor.full_name + + +def invoke(function, parameters): + ordered_parameters = [] + for parameter_definition in inspect.signature(function).parameters.values(): + annotation = parameter_definition.annotation + if annotation == inspect._empty: + raise Exception( + f"Cannot inject parameter {parameter_definition.name} of function " + f"{function}: Missing type annotation" + ) + match_found = False + for param in parameters: + if isinstance(param, annotation): + match_found = True + ordered_parameters.append(param) + if not match_found: + raise Exception( + "Cannot inject parameter {} of function {}: No matching value".format( + parameter_definition.name, function + ) + ) + return function(*ordered_parameters) + + +class StatelessFunctionHandler: + def __init__(self, function: StatelessFunction): + self.function: StatelessFunction = function + self.logger = logging.getLogger(f"StatelessFunctionHandler {function.name()}") + + def handle_unary(self, command, ctx: StatelessFunctionContext): + if ctx.command_name not in self.function.unary_handlers: + raise Exception( + "Missing command handler function for entity {} and command {}".format( + self.function.name(), ctx.command_name + ) + ) + return invoke(self.function.unary_handlers[ctx.command_name], [command, ctx]) + + def handle_stream(self, command, ctx: StatelessFunctionContext): + self.logger.info(f"handling stream: {command} {ctx}") + if ctx.command_name not in self.function.stream_handlers: + raise Exception( + "Missing command handler function for entity {} and command {}".format( + self.function.name(), ctx.command_name + ) + ) + return invoke(self.function.stream_handlers[ctx.command_name], [command, ctx]) + + def handle_stream_in(self, command, ctx: StatelessFunctionContext): + if ctx.command_name not in self.function.stream_in_handlers: + raise 
Exception( + "Missing command handler function for entity {} and command {}".format( + self.function.name(), ctx.command_name + ) + ) + return invoke( + self.function.stream_in_handlers[ctx.command_name], [command, ctx] + ) + + def handle_stream_out(self, command, ctx: StatelessFunctionContext): + if ctx.command_name not in self.function.stream_out_handlers: + raise Exception( + "Missing command handler function for entity {} and command {}".format( + self.function.name(), ctx.command_name + ) + ) + return invoke( + self.function.stream_out_handlers[ctx.command_name], [command, ctx] + ) diff --git a/cloudstate/test/__init__.py b/cloudstate/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cloudstate/test/functiondemo/__init__.py b/cloudstate/test/functiondemo/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cloudstate/test/functiondemo/function_definition.py b/cloudstate/test/functiondemo/function_definition.py new file mode 100644 index 0000000..4f51b39 --- /dev/null +++ b/cloudstate/test/functiondemo/function_definition.py @@ -0,0 +1,87 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +from typing import Iterator + +from cloudstate.stateless_function_context import StatelessFunctionContext +from cloudstate.stateless_function_entity import StatelessFunction +from cloudstate.test.functiondemo.functiondemo2_pb2 import _FUNCTIONDEMO2 +from cloudstate.test.functiondemo.functiondemo2_pb2 import ( + DESCRIPTOR as FILE_DESCRIPTOR2, +) +from cloudstate.test.functiondemo.functiondemo2_pb2 import ( + FunctionRequest2, + FunctionResponse2, +) +from cloudstate.test.functiondemo.functiondemo_pb2 import _FUNCTIONDEMO +from cloudstate.test.functiondemo.functiondemo_pb2 import DESCRIPTOR as FILE_DESCRIPTOR +from cloudstate.test.functiondemo.functiondemo_pb2 import ( + FunctionRequest, + FunctionResponse, + SumTotal, +) + +definition = StatelessFunction(_FUNCTIONDEMO, [FILE_DESCRIPTOR]) + + +@definition.unary_handler("ReverseString") +def reverse_string( + arg: FunctionRequest, ctx: StatelessFunctionContext +) -> FunctionResponse: + if arg.foo == "boom": + ctx.fail("Intentionally failed.") + else: + return FunctionResponse(bar=arg.foo[::-1]) + + +@definition.stream_handler("ReverseStrings") +def reverse_strings( + arg: Iterator, ctx: StatelessFunctionContext +) -> Iterator[FunctionResponse]: + for element in arg: + if element.foo == "boom": + ctx.fail("Intentionally failed.") + + yield FunctionResponse(bar=element.foo[::-1]) + + +@definition.stream_in_handler("SumStream") +def sum_stream( + arg: Iterator, # todo, really need generics on this api but the + # reflection api doesn't allow it.. 
+ ctx: StatelessFunctionContext, +) -> SumTotal: + total = 0 + for element in arg: + if element.quantity < 0: + ctx.fail("Intentionally failed.") + total += element.quantity + + return SumTotal(total=total) + + +@definition.stream_out_handler("SillyLetterStream") +def silly_letter_stream( + arg: FunctionRequest, ctx: StatelessFunctionContext +) -> SumTotal: + if arg.foo == "nope": + ctx.fail("Intentionally failed.") + letters = list(arg.foo) + for letter in letters: + yield FunctionResponse(bar=letter + "!!") + + +definition2 = StatelessFunction(_FUNCTIONDEMO2, [FILE_DESCRIPTOR2]) + + +@definition2.unary_handler("ReverseString2") +def reverse_string2( + arg: FunctionRequest2, ctx: StatelessFunctionContext +) -> FunctionResponse2: + if arg.foo == "boom": + ctx.fail("Intentionally failed.") + + else: + return FunctionResponse(bar=arg.foo[::-1] + "!") diff --git a/cloudstate/test/functiondemo/test_functiondemo.py b/cloudstate/test/functiondemo/test_functiondemo.py new file mode 100644 index 0000000..6cc4dbb --- /dev/null +++ b/cloudstate/test/functiondemo/test_functiondemo.py @@ -0,0 +1,87 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +import logging + +import grpc +import pytest + +from cloudstate.test.functiondemo.functiondemo2_pb2 import FunctionRequest2 +from cloudstate.test.functiondemo.functiondemo2_pb2_grpc import FunctionDemo2Stub +from cloudstate.test.functiondemo.functiondemo_pb2 import ( + AddToSum, + FunctionRequest, + FunctionResponse, +) +from cloudstate.test.functiondemo.functiondemo_pb2_grpc import FunctionDemoStub +from cloudstate.test.run_test_server import run_test_server + +logger = logging.getLogger() + + +def evaluate_functiondemo_server(host: str, port: int): + server_hostport = f"{host}:{port}" + logger.info(f"connecting on {server_hostport}") + channel = grpc.insecure_channel(server_hostport) + + stub = FunctionDemoStub(channel) + request_oof = FunctionRequest(foo="oof") + response = stub.ReverseString(request_oof) + logger.info(f"resp: {response}") + assert response.bar == "foo" + + stub2 = FunctionDemo2Stub(channel) + response = stub2.ReverseString2(request_oof) + logger.info(f"resp: {response}") + assert response.bar == "foo!" 
+ + request_boom2 = FunctionRequest2(foo="boom") + with pytest.raises(Exception): + stub2.ReverseString2(request_boom2) + logger.info("passed.") + + request_boom = FunctionRequest2(foo="boom") + requests = iter( + [FunctionRequest(foo=str(i) + ".") for i in range(10)] + + [request_boom] + + [FunctionRequest(foo=str(i) + "X") for i in range(10)] + ) + response = stub.ReverseStrings(requests) + last_response = None + with pytest.raises(Exception): + for r in response: + last_response = r + logger.info(f"streamed output: {r}") + assert last_response.bar == ".9" + + numbers_to_sum = iter([AddToSum(quantity=x) for x in [1, 2, 3, 4, 5, 6, 7, 8, 9]]) + sum = stub.SumStream(numbers_to_sum) + logger.info(sum) + assert sum.total == 45 + + numbers_to_fail_summing = iter( + AddToSum(quantity=x) for x in [1, 2, 3, 4, -1, 6, 7, 8, 9] + ) + + with pytest.raises(Exception): + stub.SumStream(numbers_to_fail_summing) + + resp = list(stub.SillyLetterStream(FunctionRequest(foo="wow"))) + assert resp == [ + FunctionResponse(bar="w!!"), + FunctionResponse(bar="o!!"), + FunctionResponse(bar="w!!"), + ] + + with pytest.raises(Exception): + resp = stub.SillyLetterStream(FunctionRequest(foo="nope")) + for i in resp: + logger.info(i) + + +def test_functiondemo(): + server_thread = run_test_server(port=8080) + evaluate_functiondemo_server("localhost", 8080) + server_thread.stop() diff --git a/cloudstate/test/run_test_server.py b/cloudstate/test/run_test_server.py new file mode 100644 index 0000000..11e28ba --- /dev/null +++ b/cloudstate/test/run_test_server.py @@ -0,0 +1,32 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +import logging +import threading + +import pytest + +from cloudstate.cloudstate import CloudState +from cloudstate.test.functiondemo.function_definition import definition, definition2 +from cloudstate.test.shoppingcart import shopping_cart_entity + +logger = logging.getLogger() + + +def run_test_server( + run_shopping_cart: bool = True, run_function_demo: bool = True, port: int = 8080 +): + server_builder = CloudState().host("0.0.0.0").port(str(port)) + if run_shopping_cart: + logger.info("adding shoppingcart service") + server_builder = server_builder.register_event_sourced_entity( + shopping_cart_entity.entity + ) + if run_function_demo: + logger.info("adding functiondemo service") + server_builder = server_builder.register_stateless_function_entity(definition) + server_builder = server_builder.register_stateless_function_entity(definition2) + + return server_builder.start() diff --git a/cloudstate/test/shoppingcart/Dockerfile b/cloudstate/test/shoppingcart/Dockerfile new file mode 100644 index 0000000..ef91c42 --- /dev/null +++ b/cloudstate/test/shoppingcart/Dockerfile @@ -0,0 +1,9 @@ +FROM python:3.8.0-slim + +COPY ./dist /dist +RUN pip install /dist/cloudstate-0.1.2-py3-none-any.whl + +WORKDIR /app +COPY ./shoppingcart ./shoppingcart +ENV PYTHONPATH=/app +ENTRYPOINT python ./shoppingcart/shopping_cart.py \ No newline at end of file diff --git a/cloudstate/test/shoppingcart/__init__.py b/cloudstate/test/shoppingcart/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cloudstate/test/shoppingcart/persistence/__init__.py b/cloudstate/test/shoppingcart/persistence/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cloudstate/test/shoppingcart/requirements.txt b/cloudstate/test/shoppingcart/requirements.txt new file mode 100644 index 0000000..fbe56f0 --- /dev/null +++ b/cloudstate/test/shoppingcart/requirements.txt @@ -0,0 +1,6 @@ +attrs==19.3.0 +grpcio==1.28.1 +grpcio-tools==1.28.1 +protobuf==3.11.3 +pytest==5.4.2 
+six==1.14.0 \ No newline at end of file diff --git a/cloudstate/test/shoppingcart/shopping_cart.py b/cloudstate/test/shoppingcart/shopping_cart.py new file mode 100644 index 0000000..6c5cbad --- /dev/null +++ b/cloudstate/test/shoppingcart/shopping_cart.py @@ -0,0 +1,15 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" + +from cloudstate.cloudstate import CloudState + +from cloudstate.test.shoppingcart.shopping_cart_entity import ( + entity as shopping_cart_entity, +) + +if __name__ == "__main__": + CloudState().port("8090").register_event_sourced_entity( + shopping_cart_entity + ).start() diff --git a/cloudstate/test/shoppingcart/shopping_cart_entity.py b/cloudstate/test/shoppingcart/shopping_cart_entity.py new file mode 100644 index 0000000..a7f058e --- /dev/null +++ b/cloudstate/test/shoppingcart/shopping_cart_entity.py @@ -0,0 +1,130 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" + +from dataclasses import dataclass, field +from typing import MutableMapping + +from google.protobuf.empty_pb2 import Empty + +from cloudstate.event_sourced_context import EventSourcedCommandContext +from cloudstate.event_sourced_entity import EventSourcedEntity + +from cloudstate.test.shoppingcart.persistence.domain_pb2 import Cart as DomainCart +from cloudstate.test.shoppingcart.persistence.domain_pb2 import ItemAdded, ItemRemoved +from cloudstate.test.shoppingcart.persistence.domain_pb2 import ( + LineItem as DomainLineItem, +) +from cloudstate.test.shoppingcart.shoppingcart_pb2 import _SHOPPINGCART +from cloudstate.test.shoppingcart.shoppingcart_pb2 import DESCRIPTOR as FILE_DESCRIPTOR +from cloudstate.test.shoppingcart.shoppingcart_pb2 import ( + AddLineItem, + Cart, + GetShoppingCart, + LineItem, + RemoveLineItem, +) + + +@dataclass +class ShoppingCartState: + entity_id: str + cart: MutableMapping[str, LineItem] = field(default_factory=dict) + + +def init(entity_id: str) -> 
ShoppingCartState: + return ShoppingCartState(entity_id) + + +entity = EventSourcedEntity(_SHOPPINGCART, [FILE_DESCRIPTOR], init) + + +def to_domain_line_item(item): + domain_item = DomainLineItem() + domain_item.productId = item.product_id + domain_item.name = item.name + domain_item.quantity = item.quantity + return domain_item + + +@entity.snapshot() +def snapshot(state: ShoppingCartState): + cart = DomainCart() + cart.items = [to_domain_line_item(item) for item in state.cart.values()] + return cart + + +def to_line_item(domain_item): + item = LineItem() + item.product_id = domain_item.productId + item.name = domain_item.name + item.quantity = domain_item.quantity + return item + + +@entity.snapshot_handler() +def handle_snapshot(state: ShoppingCartState, domain_cart: DomainCart): + state.cart = { + domain_item.productId: to_line_item(domain_item) + for domain_item in domain_cart.items + } + + +@entity.event_handler(ItemAdded) +def item_added(state: ShoppingCartState, event: ItemAdded): + cart = state.cart + if event.item.productId in cart: + item = cart[event.item.productId] + item.quantity = item.quantity + event.item.quantity + else: + item = to_line_item(event.item) + cart[item.product_id] = item + + +@entity.event_handler(ItemRemoved) +def item_removed(state: ShoppingCartState, event: ItemRemoved): + del state.cart[event.productId] + + +@entity.command_handler("GetCart") +def get_cart( + state: ShoppingCartState, item: GetShoppingCart, ctx: EventSourcedCommandContext +): + print(f"get shopping cart: {item}") + cart = Cart() + cart.items.extend(state.cart.values()) + + return cart + + +@entity.command_handler("AddItem") +def add_item(item: AddLineItem, ctx: EventSourcedCommandContext): + if item.quantity <= 0: + ctx.fail( + f"Cannot add negative quantity of to item {item.product_id} at request " + f"{item}" + ) + else: + item_added_event = ItemAdded() + item_added_event.item.CopyFrom(to_domain_line_item(item)) + ctx.emit(item_added_event) + return Empty() + 
+ +@entity.command_handler("RemoveItem") +def remove_item( + state: ShoppingCartState, item: RemoveLineItem, ctx: EventSourcedCommandContext +): + cart = state.cart + if item.product_id not in cart: + ctx.fail( + "Cannot remove item {} because it is not in the cart.".format( + item.product_id + ) + ) + else: + item_removed_event = ItemRemoved() + item_removed_event.productId = item.product_id + ctx.emit(item_removed_event) + return Empty() diff --git a/cloudstate/test/shoppingcart/test_shoppingcart.py b/cloudstate/test/shoppingcart/test_shoppingcart.py new file mode 100644 index 0000000..576caac --- /dev/null +++ b/cloudstate/test/shoppingcart/test_shoppingcart.py @@ -0,0 +1,36 @@ +import grpc + + +import logging + +from cloudstate.test.shoppingcart.shoppingcart_pb2 import GetShoppingCart, AddLineItem +from cloudstate.test.shoppingcart.shoppingcart_pb2_grpc import ShoppingCartStub + +from cloudstate.test.run_test_server import run_test_server + +logger = logging.getLogger() + + +def evaluate_shoppingcart_server(host: str, port: int): + logger.info(f"host: {host}") + logger.info(f"port: {port}") + server_hostport = f"{host}:{port}" + logger.info(f"connecting on {server_hostport}") + channel = grpc.insecure_channel(server_hostport) + + stub = ShoppingCartStub(channel) + request = GetShoppingCart(user_id="leeroy") + response = stub.GetCart(request) + logger.info(f"resp: {response}") + + stub.AddItem( + AddLineItem(user_id="leeroy", product_id="0", name="beer", quantity=24) + ) + response = stub.GetCart(request) + logger.info(f"resp: {response}") + + +def test_shoppingcart(): + server_thread = run_test_server(port=8080) + evaluate_shoppingcart_server("localhost", 8080) + server_thread.stop() diff --git a/cloudstate/test/tck_services.py b/cloudstate/test/tck_services.py new file mode 100644 index 0000000..4c1f5d3 --- /dev/null +++ b/cloudstate/test/tck_services.py @@ -0,0 +1,35 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +import sys +from logging import getLogger + +from cloudstate.test.run_test_server import run_test_server +from cloudstate.test.functiondemo.test_functiondemo import evaluate_functiondemo_server +from cloudstate.test.shoppingcart.test_shoppingcart import evaluate_shoppingcart_server + +logger = getLogger() + +if __name__ == "__main__": + if len(sys.argv) < 1 or sys.argv[1] == "server": + logger.info("starting server") + + run_test_server( + "shoppingcart" in sys.argv, "functiondemo" in sys.argv, port=8080 + ).wait_for_termination() + elif sys.argv[1] == "client": + assert "server_host" in sys.argv + server = sys.argv[sys.argv.index("server_host") + 1] + if "server_port" in sys.argv: + server_port = sys.argv[sys.argv.index("server_port") + 1] + else: + server_port = 9000 + + if "shoppingcart" in sys.argv: + evaluate_shoppingcart_server(server, server_port) + if "functiondemo" in sys.argv: + evaluate_functiondemo_server(server, server_port) + else: + raise Exception("please specify client or server.") diff --git a/cloudstate/utils/__init__.py b/cloudstate/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/cloudstate/utils/payload_utils.py b/cloudstate/utils/payload_utils.py new file mode 100644 index 0000000..ddba73c --- /dev/null +++ b/cloudstate/utils/payload_utils.py @@ -0,0 +1,27 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. 
+""" + +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.any_pb2 import Any + +_sym_db = _symbol_database.Default() + +TYPE_URL_PREFIX = "type.googleapis.com/" + + +def get_payload(command): + command_type: str = command.payload.type_url + if command_type.startswith(TYPE_URL_PREFIX): + command_type = command_type[len(TYPE_URL_PREFIX) :] + command_class = _sym_db.GetSymbol(command_type) + cmd = command_class() + cmd.ParseFromString(command.payload.value) + return cmd + + +def pack(event): + any = Any() + any.Pack(event) + return any diff --git a/cloudstate/version.py b/cloudstate/version.py new file mode 100644 index 0000000..bebba0b --- /dev/null +++ b/cloudstate/version.py @@ -0,0 +1,6 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" + +__version__ = "0.1.2" diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..958296d --- /dev/null +++ b/docs/README.md @@ -0,0 +1,23 @@ +# Cloudstate Python documentation + +Documentation source for Cloudstate Python, published to https://cloudstate.io/docs/python/current/ + +To build the docs with [sbt](https://www.scala-sbt.org): + +``` +sbt paradox +``` + +Can also first start the sbt interactive shell with `sbt`, then run commands. 
+ +The documentation can be viewed locally by opening the generated pages: + +``` +open target/paradox/site/main/index.html +``` + +To watch files for changes and rebuild docs automatically: + +``` +sbt ~paradox +``` diff --git a/docs/build.sbt b/docs/build.sbt new file mode 100644 index 0000000..9b40a52 --- /dev/null +++ b/docs/build.sbt @@ -0,0 +1,10 @@ +lazy val docs = project + .in(file(".")) + .enablePlugins(CloudstateParadoxPlugin) + .settings( + deployModule := "python", + paradoxProperties in Compile ++= Map( + "cloudstate.python.version" -> { if (isSnapshot.value) previousStableVersion.value.getOrElse("0.0.0") else version.value }, + "extref.cloudstate.base_url" -> "https://cloudstate.io/docs/core/current/%s" + ) + ) diff --git a/docs/project/build.properties b/docs/project/build.properties new file mode 100644 index 0000000..654fe70 --- /dev/null +++ b/docs/project/build.properties @@ -0,0 +1 @@ +sbt.version=1.3.12 diff --git a/docs/project/plugins.sbt b/docs/project/plugins.sbt new file mode 100644 index 0000000..2afe97a --- /dev/null +++ b/docs/project/plugins.sbt @@ -0,0 +1,2 @@ +addSbtPlugin("com.dwijnand" % "sbt-dynver" % "4.0.0") +addSbtPlugin("io.cloudstate" % "sbt-cloudstate-paradox" % "0.1.2") diff --git a/docs/src/main/paradox/gettingstarted.md b/docs/src/main/paradox/gettingstarted.md new file mode 100644 index 0000000..58863c1 --- /dev/null +++ b/docs/src/main/paradox/gettingstarted.md @@ -0,0 +1,11 @@ +# Getting started + +Install current version: + +@@@vars +``` +pip install cloudstate==$cloudstate.python.version$ +``` +@@@ + +Link to @extref:[event sourcing](cloudstate:user/features/eventsourced.html) diff --git a/docs/src/main/paradox/index.md b/docs/src/main/paradox/index.md new file mode 100644 index 0000000..b2f9f2f --- /dev/null +++ b/docs/src/main/paradox/index.md @@ -0,0 +1,7 @@ +# Cloudstate Python + +Link to @extref:[core docs](cloudstate:index.html) + +@@@ index +* [Getting started](gettingstarted.md) +@@@ diff --git 
#!/usr/bin/env bash
#
# Integration-test driver. Builds the dev TCK image, then runs two suites:
#   1. the standard Cloudstate TCK against the shopping cart entity
#   2. a client/server smoke test of the stateless function demo
# Exits 0 only when both suites pass.

# Random suffix so parallel runs don't collide on container/network names.
RUN_SUFFIX=$(tr -dc 'a-zA-Z0-9' < /dev/urandom | fold -w 10 | head -n 1)

PROXY_NAME=cloudstate-proxy-$RUN_SUFFIX
USER_FUNCTION_NAME=cloudstate-function-$RUN_SUFFIX
FUNCTION_CLIENT_NAME=cloudstate-function-client-$RUN_SUFFIX
TCK_NAME=cloudstate-tck-$RUN_SUFFIX
NETWORK_NAME=tck-network-$RUN_SUFFIX

# Fresh docker build of the image under test.
docker build -t dev-cloudstate-tck:local ./

docker network create "$NETWORK_NAME"

# --- Suite 1: primary TCK tests for the shopping cart ----------------------
# The proxy is pointed at the TCK, which sits between proxy and user
# function so it can observe the protocol; the TCK forwards to the user
# function (TCK_FRONTEND_HOST).
docker run -d --network "$NETWORK_NAME" --name "$PROXY_NAME" -p 9000:9000 \
  -e USER_FUNCTION_HOST="$TCK_NAME" \
  -e USER_FUNCTION_PORT=8090 \
  cloudstateio/cloudstate-proxy-dev-mode
sleep 10
docker run -d --network "$NETWORK_NAME" --name "$USER_FUNCTION_NAME" -p 8080:8080 \
  dev-cloudstate-tck:local \
  server \
  shoppingcart
sleep 10
docker run --rm --network "$NETWORK_NAME" --name "$TCK_NAME" -p 8090:8090 \
  -e TCK_HOST=0.0.0.0 \
  -e TCK_PROXY_HOST="$PROXY_NAME" \
  -e TCK_FRONTEND_HOST="$USER_FUNCTION_NAME" \
  cloudstateio/cloudstate-tck
status=$?

echo "Removing docker containers"
docker rm -f "$PROXY_NAME"
docker rm -f "$USER_FUNCTION_NAME"

# --- Suite 2: integration tests for the stateless function -----------------
docker run -d --network "$NETWORK_NAME" --name "$USER_FUNCTION_NAME" -p 8080:8080 \
  dev-cloudstate-tck:local \
  server \
  functiondemo \
  shoppingcart
sleep 10
docker run -d --network "$NETWORK_NAME" --name "$PROXY_NAME" -p 9000:9000 \
  -e USER_FUNCTION_HOST="$USER_FUNCTION_NAME" \
  -e USER_FUNCTION_PORT=8080 \
  cloudstateio/cloudstate-proxy-dev-mode
sleep 10
docker run --network "$NETWORK_NAME" --name "$FUNCTION_CLIENT_NAME" \
  dev-cloudstate-tck:local \
  client \
  server_host "$PROXY_NAME" \
  functiondemo \
  shoppingcart
status1=$?

echo "Removing docker containers and network"
docker rm -f "$PROXY_NAME"
docker rm -f "$USER_FUNCTION_NAME"
# The client container was started without --rm and was previously leaked.
docker rm -f "$FUNCTION_CLIENT_NAME"

docker network rm "$NETWORK_NAME"

# Succeed only if both suites succeeded.
RETURNSTATUS=1
if [ "$status1" -eq 0 ] && [ "$status" -eq 0 ]; then
  RETURNSTATUS=0
fi

exit $RETURNSTATUS
These deltas have no way of dealing with +// conflicts, hence it important that the CloudState proxy always know what the state of the user functions in memory +// value is before sending a delta. If the CloudState proxy is not sure what the value is, eg because it has just sent +// an operation to the user function may have updated its value as a result, the proxy should wait until it gets the +// result of the operation back, to ensure its in memory value is in sync with the user function so that it can +// calculate deltas that won't conflict. +// +// The user function is expected to update its value both as the result of receiving deltas from the proxy, as well as +// when it sends deltas. It must not update its value in any other circumstance, updating the value in response to any +// other stimuli risks the value becoming out of sync with the CloudState proxy. The user function will not be sent +// back deltas as a result of its own changes. +// +// An invocation of handle is made for each entity being handled. It may be kept alive and used to handle multiple +// commands, and may subsequently be terminated if that entity becomes idle, or if the entity is deleted. Shutdown is +// typically done for efficiency reasons, unless the entity is explicitly deleted, a terminated handle stream does not +// mean the proxy has stopped tracking the state of the entity in its memory. +// +// Special care must be taken when working with maps and sets. The keys/values are google.protobuf.Any, which encodes +// the value as binary protobuf, however, serialized protobufs are not stable, two semantically equal objects could +// encode to different bytes. It is the responsibility of the user function to ensure that stable encodings are used. +service Crdt { + + // After invoking handle, the first message sent will always be a CrdtInit message, containing the entity ID, and, + // if it exists or is available, the current state of the entity. 
After that, one or more commands may be sent, + // as well as deltas as they arrive, and the entire state if either the entity is created, or the proxy wishes the + // user function to replace its entire state. + // + // The user function must respond with one reply per command in. They do not necessarily have to be sent in the same + // order that the commands were sent, the command ID is used to correlate commands to replies. + rpc handle(stream CrdtStreamIn) returns (stream CrdtStreamOut); +} + +// Message for the Crdt handle stream in. +message CrdtStreamIn { + oneof message { + + // Always sent first, and only once. + CrdtInit init = 1; + + // Sent to indicate the user function should replace its current state with this state. If the user function + // does not have a current state, either because the init function didn't send one and the user function hasn't + // updated the state itself in response to a command, or because the state was deleted, this must be sent before + // any deltas. + CrdtState state = 2; + + // A delta to be applied to the current state. May be sent at any time as long as the user function already has + // state. + CrdtDelta changed = 3; + + // Delete the entity. May be sent at any time. The user function should clear its state when it receives this. + // A proxy may decide to terminate the stream after sending this. + CrdtDelete deleted = 4; + + // A command, may be sent at any time. + Command command = 5; + + // A stream has been cancelled. + StreamCancelled stream_cancelled = 6; + } +} + +// Message for the Crdt handle stream out. +message CrdtStreamOut { + oneof message { + // A reply to an incoming command. Either one reply, or one failure, must be sent in response to each command. + CrdtReply reply = 1; + // A streamed message. + CrdtStreamedMessage streamed_message = 2; + // A stream cancelled response, may be sent in response to stream_cancelled. + CrdtStreamCancelledResponse stream_cancelled_response = 3; + // A failure. 
Either sent in response to a command, or sent if some other error occurs. + Failure failure = 4; + } +} + +// The CRDT state. This represents the full state of a CRDT. When received, a user function should replace the current +// state with this, not apply it as a delta. This includes both for the top level CRDT, and embedded CRDTs, such as +// the values of an ORMap. +message CrdtState { + oneof state { + // A Grow-only Counter + GCounterState gcounter = 1; + + // A Positve-Negative Counter + PNCounterState pncounter = 2; + + // A Grow-only Set + GSetState gset = 3; + + // An Observed-Removed Set + ORSetState orset = 4; + + // A Last-Write-Wins Register + LWWRegisterState lwwregister = 5; + + // A Flag + FlagState flag = 6; + + // An Observed-Removed Map + ORMapState ormap = 7; + + // A vote + VoteState vote = 8; + } +} + +// A Grow-only counter +// +// A G-Counter can only be incremented, it can't be decremented. +message GCounterState { + + // The current value of the counter. + uint64 value = 1; +} + +// A Positve-Negative Counter +// +// A PN-Counter can be both incremented and decremented. +message PNCounterState { + + // The current value of the counter. + int64 value = 1; +} + +// A Grow-only Set +// +// A G-Set can only have items added, items cannot be removed. +message GSetState { + + // The current items in the set. + repeated google.protobuf.Any items = 1; +} + +// An Observed-Removed Set +// +// An OR-Set may have items added and removed, with the condition that an item must be observed to be in the set before +// it is removed. +message ORSetState { + + // The current items in the set. + repeated google.protobuf.Any items = 1; +} + +// A Last-Write-Wins Register +// +// A LWW-Register holds a single value, with the current value being selected based on when it was last written. +// The time of the last write may either be determined using the proxies clock, or may be based on a custom, domain +// specific value. 
+message LWWRegisterState { + + // The current value of the register. + google.protobuf.Any value = 1; + + // The clock to use if this state needs to be merged with another one. + CrdtClock clock = 2; + + // The clock value if the clock in use is a custom clock. + int64 custom_clock_value = 3; +} + +// A Flag +// +// A Flag is a boolean value, that once set to true, stays true. +message FlagState { + + // The current value of the flag. + bool value = 1; +} + +// An Observed-Removed Map +// +// Like an OR-Set, an OR-Map may have items added and removed, with the condition that an item must be observed to be +// in the map before it is removed. The values of the map are CRDTs themselves. Different keys are allowed to use +// different CRDTs, and if an item is removed, and then replaced, the new value may be a different CRDT. +message ORMapState { + + // The entries of the map. + repeated ORMapEntry entries = 1; +} + +// An OR-Map entry. +message ORMapEntry { + + // The entry key. + google.protobuf.Any key = 1; + + // The value of the entry, a CRDT itself. + CrdtState value = 2; +} + +// A Vote. This allows nodes to vote on something. 
+message VoteState { + + // The number of votes for + uint32 votes_for = 1; + + // The total number of voters + uint32 total_voters = 2; + + // The vote of the current node, which is included in the above two numbers + bool self_vote = 3; +} + +// A CRDT delta +// +// Deltas only carry the change in value, not the full value (unless +message CrdtDelta { + oneof delta { + GCounterDelta gcounter = 1; + PNCounterDelta pncounter = 2; + GSetDelta gset = 3; + ORSetDelta orset = 4; + LWWRegisterDelta lwwregister = 5; + FlagDelta flag = 6; + ORMapDelta ormap = 7; + VoteDelta vote = 8; + } +} + +message GCounterDelta { + uint64 increment = 1; +} + +message PNCounterDelta { + sint64 change = 1; +} + +message GSetDelta { + repeated google.protobuf.Any added = 1; +} + +message ORSetDelta { + // If cleared is set, the set must be cleared before added is processed. + bool cleared = 1; + repeated google.protobuf.Any removed = 2; + repeated google.protobuf.Any added = 3; +} + +message LWWRegisterDelta { + google.protobuf.Any value = 1; + CrdtClock clock = 2; + int64 custom_clock_value = 3; +} + +message FlagDelta { + bool value = 1; +} + +message ORMapDelta { + bool cleared = 1; + repeated google.protobuf.Any removed = 2; + repeated ORMapEntryDelta updated = 3; + repeated ORMapEntry added = 4; +} + +message ORMapEntryDelta { + // The entry key. + google.protobuf.Any key = 1; + + CrdtDelta delta = 2; +} + +message VoteDelta { + // Only set by the user function to change the nodes current vote. + bool self_vote = 1; + + // Only set by the proxy to change the votes for and total voters. 
+ int32 votes_for = 2; + int32 total_voters = 3; +} + +message CrdtInit { + string service_name = 1; + string entity_id = 2; + CrdtState state = 3; +} + +message CrdtDelete { +} + +message CrdtReply { + + int64 command_id = 1; + + ClientAction client_action = 2; + + repeated SideEffect side_effects = 4; + + CrdtStateAction state_action = 5; + + // If the request was streamed, setting this to true indicates that the command should + // be handled as a stream. Subsequently, the user function may send CrdtStreamedMessage, + // and a CrdtStreamCancelled message will be sent if the stream is cancelled (though + // not if the a CrdtStreamedMessage ends the stream first). + bool streamed = 6; +} + +message CrdtStateAction { + oneof action { + CrdtState create = 5; + CrdtDelta update = 6; + CrdtDelete delete = 7; + } + + CrdtWriteConsistency write_consistency = 8; +} + +// May be sent as often as liked if the first reply set streamed to true +message CrdtStreamedMessage { + + int64 command_id = 1; + + ClientAction client_action = 2; + + repeated SideEffect side_effects = 3; + + // Indicates the stream should end, no messages may be sent for this command after this. + bool end_stream = 4; +} + +message CrdtStreamCancelledResponse { + int64 command_id = 1; + + repeated SideEffect side_effects = 2; + + CrdtStateAction state_action = 3; +} + +enum CrdtWriteConsistency { + LOCAL = 0; + MAJORITY = 1; + ALL = 2; +} + +enum CrdtClock { + // Use the default clock for deciding the last write, which is the system clocks + // milliseconds since epoch. + DEFAULT = 0; + // Use the reverse semantics with the default clock, to enable first write wins. + REVERSE = 1; + // Use a custom clock value, set using custom_clock_value. + CUSTOM = 2; + // Use a custom clock value, but automatically increment it by one if the clock + // value from the current value is equal to the custom_clock_value. 
+ CUSTOM_AUTO_INCREMENT = 3; +} diff --git a/protobuf/lib/cloudstate/entity.proto b/protobuf/lib/cloudstate/entity.proto new file mode 100644 index 0000000..671b938 --- /dev/null +++ b/protobuf/lib/cloudstate/entity.proto @@ -0,0 +1,191 @@ +// Copyright 2019 Lightbend Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gRPC interface for common messages and services for Entity user functions. + +syntax = "proto3"; + +package cloudstate; + +// Any is used so that domain events defined according to the functions business domain can be embedded inside +// the protocol. +import "google/protobuf/any.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/descriptor.proto"; + +option java_package = "io.cloudstate.protocol"; +option go_package = "cloudstate/protocol"; + +// A reply to the sender. +message Reply { + // The reply payload + google.protobuf.Any payload = 1; +} + +// Forwards handling of this request to another entity. +message Forward { + // The name of the service to forward to. + string service_name = 1; + // The name of the command. + string command_name = 2; + // The payload. + google.protobuf.Any payload = 3; +} + +// An action for the client +message ClientAction { + oneof action { + + // Send a reply + Reply reply = 1; + + // Forward to another entity + Forward forward = 2; + + // Send a failure to the client + Failure failure = 3; + } +} + +// A side effect to be done after this command is handled. 
+message SideEffect { + + // The name of the service to perform the side effect on. + string service_name = 1; + + // The name of the command. + string command_name = 2; + + // The payload of the command. + google.protobuf.Any payload = 3; + + // Whether this side effect should be performed synchronously, ie, before the reply is eventually + // sent, or not. + bool synchronous = 4; +} + +// A command. For each command received, a reply must be sent with a matching command id. +message Command { + + // The ID of the entity. + string entity_id = 1; + + // A command id. + int64 id = 2; + + // Command name + string name = 3; + + // The command payload. + google.protobuf.Any payload = 4; + + // Whether the command is streamed or not + bool streamed = 5; +} + +message StreamCancelled { + + // The ID of the entity + string entity_id = 1; + + // The command id + int64 id = 2; +} + +// A failure reply. If this is returned, it will be translated into a gRPC unknown +// error with the corresponding description if supplied. +message Failure { + + // The id of the command being replied to. Must match the input command. + int64 command_id = 1; + + // A description of the error. + string description = 2; +} + +message EntitySpec { + // This should be the Descriptors.FileDescriptorSet in proto serialized from as generated by: + // protoc --include_imports \ + // --proto_path= \ + // --descriptor_set_out=user-function.desc \ + // + bytes proto = 1; + + // The entities being served. + repeated Entity entities = 2; + + // Optional information about the service. + ServiceInfo service_info = 3; +} + +// Information about the service that proxy is proxying to. +// All of the information in here is optional. It may be useful for debug purposes. +message ServiceInfo { + + // The name of the service, eg, "shopping-cart". + string service_name = 1; + + // The version of the service. + string service_version = 2; + + // A description of the runtime for the service. 
Can be anything, but examples might be: + // - node v10.15.2 + // - OpenJDK Runtime Environment 1.8.0_192-b12 + string service_runtime = 3; + + // If using a support library, the name of that library, eg "cloudstate" + string support_library_name = 4; + + // The version of the support library being used. + string support_library_version = 5; +} + +message Entity { + + // The type of entity. By convention, this should be a fully qualified entity protocol grpc + // service name, for example, cloudstate.eventsourced.EventSourced. + string entity_type = 1; + + // The name of the service to load from the protobuf file. + string service_name = 2; + + // The ID to namespace state by. How this is used depends on the type of entity, for example, + // event sourced entities will prefix this to the persistence id. + string persistence_id = 3; +} + +message UserFunctionError { + string message = 1; +} + +message ProxyInfo { + int32 protocol_major_version = 1; + int32 protocol_minor_version = 2; + string proxy_name = 3; + string proxy_version = 4; + repeated string supported_entity_types = 5; +} + +// Entity discovery service. +service EntityDiscovery { + + // Discover what entities the user function wishes to serve. + rpc discover(ProxyInfo) returns (EntitySpec) {} + + // Report an error back to the user function. This will only be invoked to tell the user function + // that it has done something wrong, eg, violated the protocol, tried to use an entity type that + // isn't supported, or attempted to forward to an entity that doesn't exist, etc. These messages + // should be logged clearly for debugging purposes. + rpc reportError(UserFunctionError) returns (google.protobuf.Empty) {} +} diff --git a/protobuf/lib/cloudstate/event_sourced.proto b/protobuf/lib/cloudstate/event_sourced.proto new file mode 100644 index 0000000..0417f14 --- /dev/null +++ b/protobuf/lib/cloudstate/event_sourced.proto @@ -0,0 +1,115 @@ +// Copyright 2019 Lightbend Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gRPC interface for Event Sourced Entity user functions. + +syntax = "proto3"; + +package cloudstate.eventsourced; + +// Any is used so that domain events defined according to the functions business domain can be embedded inside +// the protocol. +import "google/protobuf/any.proto"; +import "cloudstate/entity.proto"; + +option java_package = "io.cloudstate.protocol"; +option go_package = "cloudstate/protocol"; + +// The init message. This will always be the first message sent to the entity when +// it is loaded. +message EventSourcedInit { + + string service_name = 1; + + // The ID of the entity. + string entity_id = 2; + + // If present the entity should initialise its state using this snapshot. + EventSourcedSnapshot snapshot = 3; +} + +// A snapshot +message EventSourcedSnapshot { + + // The sequence number when the snapshot was taken. + int64 snapshot_sequence = 1; + + // The snapshot. + google.protobuf.Any snapshot = 2; +} + +// An event. These will be sent to the entity when the entity starts up. +message EventSourcedEvent { + + // The sequence number of the event. + int64 sequence = 1; + + // The event payload. + google.protobuf.Any payload = 2; +} + +// A reply to a command. +message EventSourcedReply { + + // The id of the command being replied to. Must match the input command. 
+ int64 command_id = 1; + + // The action to take + ClientAction client_action = 2; + + // Any side effects to perform + repeated SideEffect side_effects = 3; + + // A list of events to persist - these will be persisted before the reply + // is sent. + repeated google.protobuf.Any events = 4; + + // An optional snapshot to persist. It is assumed that this snapshot will have + // the state of any events in the events field applied to it. It is illegal to + // send a snapshot without sending any events. + google.protobuf.Any snapshot = 5; +} + +// Input message type for the gRPC stream in. +message EventSourcedStreamIn { + oneof message { + EventSourcedInit init = 1; + EventSourcedEvent event = 2; + Command command = 3; + } +} + +// Output message type for the gRPC stream out. +message EventSourcedStreamOut { + oneof message { + EventSourcedReply reply = 1; + Failure failure = 2; + } +} + +// The Entity service +service EventSourced { + + // The stream. One stream will be established per active entity. + // Once established, the first message sent will be Init, which contains the entity ID, and, + // if the entity has previously persisted a snapshot, it will contain that snapshot. It will + // then send zero to many event messages, one for each event previously persisted. The entity + // is expected to apply these to its state in a deterministic fashion. Once all the events + // are sent, one to many commands are sent, with new commands being sent as new requests for + // the entity come in. The entity is expected to reply to each command with exactly one reply + // message. The entity should reply in order, and any events that the entity requests to be + // persisted the entity should handle itself, applying them to its own state, as if they had + // arrived as events when the event stream was being replayed on load. 
+ rpc handle(stream EventSourcedStreamIn) returns (stream EventSourcedStreamOut) {} +} diff --git a/protobuf/lib/cloudstate/function.proto b/protobuf/lib/cloudstate/function.proto new file mode 100644 index 0000000..d0e54ea --- /dev/null +++ b/protobuf/lib/cloudstate/function.proto @@ -0,0 +1,61 @@ +// Copyright 2019 Lightbend Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// gRPC interface for Stateless Entity user functions. + +syntax = "proto3"; + +package cloudstate.function; + +// Any is used so that domain events defined according to the functions business domain can be embedded inside +// the protocol. +import "google/protobuf/any.proto"; +import "cloudstate/entity.proto"; + +option java_package = "io.cloudstate.protocol"; +option go_package = "cloudstate/protocol"; + +message FunctionCommand { + // The name of the service this function is on. + string service_name = 2; + + // Command name + string name = 3; + + // The command payload. 
+ google.protobuf.Any payload = 4; +} + +message FunctionReply { + + oneof response { + Failure failure = 1; + Reply reply = 2; + Forward forward = 3; + } + + repeated SideEffect side_effects = 4; +} + +service StatelessFunction { + + rpc handleUnary(FunctionCommand) returns (FunctionReply) {} + + rpc handleStreamedIn(stream FunctionCommand) returns (FunctionReply) {} + + rpc handleStreamedOut(FunctionCommand) returns (stream FunctionReply) {} + + rpc handleStreamed(stream FunctionCommand) returns (stream FunctionReply) {} + +} diff --git a/protobuf/lib/google/api/annotations.proto b/protobuf/lib/google/api/annotations.proto new file mode 100644 index 0000000..f7bcab1 --- /dev/null +++ b/protobuf/lib/google/api/annotations.proto @@ -0,0 +1,32 @@ +// Copyright (c) 2015, Google Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.api; + +import "google/api/http.proto"; +import "google/protobuf/descriptor.proto"; + +option csharp_namespace = "Google.Protobuf"; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "AnnotationsProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +extend google.protobuf.MethodOptions { + // See `HttpRule`. 
+ HttpRule http = 72295728; +} diff --git a/protobuf/lib/google/api/http.proto b/protobuf/lib/google/api/http.proto new file mode 100644 index 0000000..d554871 --- /dev/null +++ b/protobuf/lib/google/api/http.proto @@ -0,0 +1,377 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.api; + +option csharp_namespace = "Google.Protobuf"; +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/annotations;annotations"; +option java_multiple_files = true; +option java_outer_classname = "HttpProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Defines the HTTP configuration for an API service. It contains a list of +// [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method +// to one or more HTTP REST API methods. +message Http { + // A list of HTTP configuration rules that apply to individual API methods. + // + // **NOTE:** All service configuration rules follow "last one wins" order. + repeated HttpRule rules = 1; + + // When set to true, URL path parameters will be fully URI-decoded except in + // cases of single segment matches in reserved expansion, where "%2F" will be + // left encoded. + // + // The default behavior is to not decode RFC 6570 reserved characters in multi + // segment matches. 
+ bool fully_decode_reserved_expansion = 2; +} + +// # gRPC Transcoding +// +// gRPC Transcoding is a feature for mapping between a gRPC method and one or +// more HTTP REST endpoints. It allows developers to build a single API service +// that supports both gRPC APIs and REST APIs. Many systems, including [Google +// APIs](https://github.com/googleapis/googleapis), +// [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC +// Gateway](https://github.com/grpc-ecosystem/grpc-gateway), +// and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature +// and use it for large scale production services. +// +// `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies +// how different portions of the gRPC request message are mapped to the URL +// path, URL query parameters, and HTTP request body. It also controls how the +// gRPC response message is mapped to the HTTP response body. `HttpRule` is +// typically specified as an `google.api.http` annotation on the gRPC method. +// +// Each mapping specifies a URL path template and an HTTP method. The path +// template may refer to one or more fields in the gRPC request message, as long +// as each field is a non-repeated field with a primitive (non-message) type. +// The path template controls how fields of the request message are mapped to +// the URL path. +// +// Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/{name=messages/*}" +// }; +// } +// } +// message GetMessageRequest { +// string name = 1; // Mapped to URL path. +// } +// message Message { +// string text = 1; // The resource content. 
+// } +// +// This enables an HTTP REST to gRPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")` +// +// Any fields in the request message which are not bound by the path template +// automatically become HTTP query parameters if there is no HTTP request body. +// For example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get:"/v1/messages/{message_id}" +// }; +// } +// } +// message GetMessageRequest { +// message SubMessage { +// string subfield = 1; +// } +// string message_id = 1; // Mapped to URL path. +// int64 revision = 2; // Mapped to URL query parameter `revision`. +// SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`. +// } +// +// This enables a HTTP JSON to RPC mapping as below: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456?revision=2&sub.subfield=foo` | +// `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield: +// "foo"))` +// +// Note that fields which are mapped to URL query parameters must have a +// primitive type or a repeated primitive type or a non-repeated message type. +// In the case of a repeated type, the parameter can be repeated in the URL +// as `...?param=A¶m=B`. In the case of a message type, each field of the +// message is mapped to a separate parameter, such as +// `...?foo.a=A&foo.b=B&foo.c=C`. +// +// For HTTP methods that allow a request body, the `body` field +// specifies the mapping. 
Consider a REST update method on the +// message resource collection: +// +// service Messaging { +// rpc UpdateMessage(UpdateMessageRequest) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "message" +// }; +// } +// } +// message UpdateMessageRequest { +// string message_id = 1; // mapped to the URL +// Message message = 2; // mapped to the body +// } +// +// The following HTTP JSON to RPC mapping is enabled, where the +// representation of the JSON in the request body is determined by +// protos JSON encoding: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" message { text: "Hi!" })` +// +// The special name `*` can be used in the body mapping to define that +// every field not bound by the path template should be mapped to the +// request body. This enables the following alternative definition of +// the update method: +// +// service Messaging { +// rpc UpdateMessage(Message) returns (Message) { +// option (google.api.http) = { +// patch: "/v1/messages/{message_id}" +// body: "*" +// }; +// } +// } +// message Message { +// string message_id = 1; +// string text = 2; +// } +// +// +// The following HTTP JSON to RPC mapping is enabled: +// +// HTTP | gRPC +// -----|----- +// `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id: +// "123456" text: "Hi!")` +// +// Note that when using `*` in the body mapping, it is not possible to +// have HTTP parameters, as all fields not bound by the path end in +// the body. This makes this option more rarely used in practice when +// defining REST APIs. The common usage of `*` is in custom methods +// which don't use the URL at all for transferring data. +// +// It is possible to define multiple HTTP methods for one RPC by using +// the `additional_bindings` option. 
Example: +// +// service Messaging { +// rpc GetMessage(GetMessageRequest) returns (Message) { +// option (google.api.http) = { +// get: "/v1/messages/{message_id}" +// additional_bindings { +// get: "/v1/users/{user_id}/messages/{message_id}" +// } +// }; +// } +// } +// message GetMessageRequest { +// string message_id = 1; +// string user_id = 2; +// } +// +// This enables the following two alternative HTTP JSON to RPC mappings: +// +// HTTP | gRPC +// -----|----- +// `GET /v1/messages/123456` | `GetMessage(message_id: "123456")` +// `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id: +// "123456")` +// +// ## Rules for HTTP mapping +// +// 1. Leaf request fields (recursive expansion nested messages in the request +// message) are classified into three categories: +// - Fields referred by the path template. They are passed via the URL path. +// - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP +// request body. +// - All other fields are passed via the URL query parameters, and the +// parameter name is the field path in the request message. A repeated +// field can be represented as multiple query parameters under the same +// name. +// 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields +// are passed via URL path and HTTP request body. +// 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all +// fields are passed via URL path and URL query parameters. +// +// ### Path template syntax +// +// Template = "/" Segments [ Verb ] ; +// Segments = Segment { "/" Segment } ; +// Segment = "*" | "**" | LITERAL | Variable ; +// Variable = "{" FieldPath [ "=" Segments ] "}" ; +// FieldPath = IDENT { "." IDENT } ; +// Verb = ":" LITERAL ; +// +// The syntax `*` matches a single URL path segment. 
The syntax `**` matches +// zero or more URL path segments, which must be the last part of the URL path +// except the `Verb`. +// +// The syntax `Variable` matches part of the URL path as specified by its +// template. A variable template must not contain other variables. If a variable +// matches a single path segment, its template may be omitted, e.g. `{var}` +// is equivalent to `{var=*}`. +// +// The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL` +// contains any reserved character, such characters should be percent-encoded +// before the matching. +// +// If a variable contains exactly one path segment, such as `"{var}"` or +// `"{var=*}"`, when such a variable is expanded into a URL path on the client +// side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The +// server side does the reverse decoding. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{var}`. +// +// If a variable contains multiple path segments, such as `"{var=foo/*}"` +// or `"{var=**}"`, when such a variable is expanded into a URL path on the +// client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded. +// The server side does the reverse decoding, except "%2F" and "%2f" are left +// unchanged. Such variables show up in the +// [Discovery +// Document](https://developers.google.com/discovery/v1/reference/apis) as +// `{+var}`. +// +// ## Using gRPC API Service Configuration +// +// gRPC API Service Configuration (service config) is a configuration language +// for configuring a gRPC service to become a user-facing product. The +// service config is simply the YAML representation of the `google.api.Service` +// proto message. +// +// As an alternative to annotating your proto file, you can configure gRPC +// transcoding in your service config YAML files. 
You do this by specifying a +// `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same +// effect as the proto annotation. This can be particularly useful if you +// have a proto that is reused in multiple services. Note that any transcoding +// specified in the service config will override any matching transcoding +// configuration in the proto. +// +// Example: +// +// http: +// rules: +// # Selects a gRPC method and applies HttpRule to it. +// - selector: example.v1.Messaging.GetMessage +// get: /v1/messages/{message_id}/{sub.subfield} +// +// ## Special notes +// +// When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the +// proto to JSON conversion must follow the [proto3 +// specification](https://developers.google.com/protocol-buffers/docs/proto3#json). +// +// While the single segment variable follows the semantics of +// [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String +// Expansion, the multi segment variable **does not** follow RFC 6570 Section +// 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion +// does not expand special characters like `?` and `#`, which would lead +// to invalid URLs. As the result, gRPC Transcoding uses a custom encoding +// for multi segment variables. +// +// The path variables **must not** refer to any repeated or mapped field, +// because client libraries are not capable of handling such variable expansion. +// +// The path variables **must not** capture the leading "/" character. The reason +// is that the most common use case "{var}" does not capture the leading "/" +// character. For consistency, all path variables must share the same behavior. +// +// Repeated message fields must not be mapped to URL query parameters, because +// no client library can support such complicated mapping. +// +// If an API needs to use a JSON array for request or response body, it can map +// the request or response body to a repeated field. 
However, some gRPC +// Transcoding implementations may not support this feature. +message HttpRule { + // Selects a method to which this rule applies. + // + // Refer to [selector][google.api.DocumentationRule.selector] for syntax details. + string selector = 1; + + // Determines the URL pattern is matched by this rules. This pattern can be + // used with any of the {get|put|post|delete|patch} methods. A custom method + // can be defined using the 'custom' field. + oneof pattern { + // Maps to HTTP GET. Used for listing and getting information about + // resources. + string get = 2; + + // Maps to HTTP PUT. Used for replacing a resource. + string put = 3; + + // Maps to HTTP POST. Used for creating a resource or performing an action. + string post = 4; + + // Maps to HTTP DELETE. Used for deleting a resource. + string delete = 5; + + // Maps to HTTP PATCH. Used for updating a resource. + string patch = 6; + + // The custom pattern is used for specifying an HTTP method that is not + // included in the `pattern` field, such as HEAD, or "*" to leave the + // HTTP method unspecified for this rule. The wild-card rule is useful + // for services that provide content to Web (HTML) clients. + CustomHttpPattern custom = 8; + } + + // The name of the request field whose value is mapped to the HTTP request + // body, or `*` for mapping all request fields not captured by the path + // pattern to the HTTP body, or omitted for not having any HTTP request body. + // + // NOTE: the referred field must be present at the top-level of the request + // message type. + string body = 7; + + // Optional. The name of the response field whose value is mapped to the HTTP + // response body. When omitted, the entire response message will be used + // as the HTTP response body. + // + // NOTE: The referred field must be present at the top-level of the response + // message type. + string response_body = 12; + + // Additional HTTP bindings for the selector. 
Nested bindings must + // not contain an `additional_bindings` field themselves (that is, + // the nesting may only be one level deep). + repeated HttpRule additional_bindings = 11; +} + +// A custom pattern is used for defining custom HTTP verb. +message CustomHttpPattern { + // The name of this custom HTTP verb. + string kind = 1; + + // The path matched by this custom verb. + string path = 2; +} diff --git a/protobuf/lib/google/api/httpbody.proto b/protobuf/lib/google/api/httpbody.proto new file mode 100644 index 0000000..45c1e76 --- /dev/null +++ b/protobuf/lib/google/api/httpbody.proto @@ -0,0 +1,78 @@ +// Copyright 2019 Google LLC. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// + +syntax = "proto3"; + +package google.api; + +import "google/protobuf/any.proto"; + +option cc_enable_arenas = true; +option go_package = "google.golang.org/genproto/googleapis/api/httpbody;httpbody"; +option java_multiple_files = true; +option java_outer_classname = "HttpBodyProto"; +option java_package = "com.google.api"; +option objc_class_prefix = "GAPI"; + +// Message that represents an arbitrary HTTP body. It should only be used for +// payload formats that can't be represented as JSON, such as raw binary or +// an HTML page. +// +// +// This message can be used both in streaming and non-streaming API methods in +// the request as well as the response. 
+// +// It can be used as a top-level request field, which is convenient if one +// wants to extract parameters from either the URL or HTTP template into the +// request fields and also want access to the raw HTTP body. +// +// Example: +// +// message GetResourceRequest { +// // A unique request id. +// string request_id = 1; +// +// // The raw HTTP body is bound to this field. +// google.api.HttpBody http_body = 2; +// } +// +// service ResourceService { +// rpc GetResource(GetResourceRequest) returns (google.api.HttpBody); +// rpc UpdateResource(google.api.HttpBody) returns +// (google.protobuf.Empty); +// } +// +// Example with streaming methods: +// +// service CaldavService { +// rpc GetCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// rpc UpdateCalendar(stream google.api.HttpBody) +// returns (stream google.api.HttpBody); +// } +// +// Use of this type only changes how the request and response bodies are +// handled, all other features will continue to work unchanged. +message HttpBody { + // The HTTP Content-Type header value specifying the content type of the body. + string content_type = 1; + + // The HTTP request/response body as raw binary. + bytes data = 2; + + // Application specific response metadata. Must be set in the first response + // for streaming APIs. + repeated google.protobuf.Any extensions = 3; +} diff --git a/protobuf/lib/grpc/reflection/v1alpha/reflection.proto b/protobuf/lib/grpc/reflection/v1alpha/reflection.proto new file mode 100644 index 0000000..816852f --- /dev/null +++ b/protobuf/lib/grpc/reflection/v1alpha/reflection.proto @@ -0,0 +1,136 @@ +// Copyright 2016 gRPC authors. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Service exported by server reflection + +syntax = "proto3"; + +package grpc.reflection.v1alpha; + +service ServerReflection { + // The reflection service is structured as a bidirectional stream, ensuring + // all related requests go to a single server. + rpc ServerReflectionInfo(stream ServerReflectionRequest) + returns (stream ServerReflectionResponse); +} + +// The message sent by the client when calling ServerReflectionInfo method. +message ServerReflectionRequest { + string host = 1; + // To use reflection service, the client should set one of the following + // fields in message_request. The server distinguishes requests by their + // defined field and then handles them using corresponding methods. + oneof message_request { + // Find a proto file by the file name. + string file_by_filename = 3; + + // Find the proto file that declares the given fully-qualified symbol name. + // This field should be a fully-qualified symbol name + // (e.g. .[.] or .). + string file_containing_symbol = 4; + + // Find the proto file which defines an extension extending the given + // message type with the given field number. + ExtensionRequest file_containing_extension = 5; + + // Finds the tag numbers used by all known extensions of the given message + // type, and appends them to ExtensionNumberResponse in an undefined order. + // Its corresponding method is best-effort: it's not guaranteed that the + // reflection service will implement this method, and it's not guaranteed + // that this method will provide all extensions. 
Returns + // StatusCode::UNIMPLEMENTED if it's not implemented. + // This field should be a fully-qualified type name. The format is + // . + string all_extension_numbers_of_type = 6; + + // List the full names of registered services. The content will not be + // checked. + string list_services = 7; + } +} + +// The type name and extension number sent by the client when requesting +// file_containing_extension. +message ExtensionRequest { + // Fully-qualified type name. The format should be . + string containing_type = 1; + int32 extension_number = 2; +} + +// The message sent by the server to answer ServerReflectionInfo method. +message ServerReflectionResponse { + string valid_host = 1; + ServerReflectionRequest original_request = 2; + // The server set one of the following fields accroding to the message_request + // in the request. + oneof message_response { + // This message is used to answer file_by_filename, file_containing_symbol, + // file_containing_extension requests with transitive dependencies. As + // the repeated label is not allowed in oneof fields, we use a + // FileDescriptorResponse message to encapsulate the repeated fields. + // The reflection service is allowed to avoid sending FileDescriptorProtos + // that were previously sent in response to earlier requests in the stream. + FileDescriptorResponse file_descriptor_response = 4; + + // This message is used to answer all_extension_numbers_of_type requst. + ExtensionNumberResponse all_extension_numbers_response = 5; + + // This message is used to answer list_services request. + ListServiceResponse list_services_response = 6; + + // This message is used when an error occurs. + ErrorResponse error_response = 7; + } +} + +// Serialized FileDescriptorProto messages sent by the server answering +// a file_by_filename, file_containing_symbol, or file_containing_extension +// request. +message FileDescriptorResponse { + // Serialized FileDescriptorProto messages. 
We avoid taking a dependency on + // descriptor.proto, which uses proto2 only features, by making them opaque + // bytes instead. + repeated bytes file_descriptor_proto = 1; +} + +// A list of extension numbers sent by the server answering +// all_extension_numbers_of_type request. +message ExtensionNumberResponse { + // Full name of the base type, including the package name. The format + // is . + string base_type_name = 1; + repeated int32 extension_number = 2; +} + +// A list of ServiceResponse sent by the server answering list_services request. +message ListServiceResponse { + // The information of each service may be expanded in the future, so we use + // ServiceResponse message to encapsulate it. + repeated ServiceResponse service = 1; +} + +// The information of a single service used by ListServiceResponse to answer +// list_services request. +message ServiceResponse { + // Full name of a registered service, including its package name. The format + // is . + string name = 1; +} + +// The error code and error message sent by the server when an error occurs. +message ErrorResponse { + // This field uses the error codes defined in grpc::StatusCode. + int32 error_code = 1; + string error_message = 2; +} diff --git a/protobuf/proto/cloudstate/entity_key.proto b/protobuf/proto/cloudstate/entity_key.proto new file mode 100644 index 0000000..d208121 --- /dev/null +++ b/protobuf/proto/cloudstate/entity_key.proto @@ -0,0 +1,30 @@ +// Copyright 2019 Lightbend Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +// Extension for specifying which field in a message is to be considered an +// entity key, for the purposes associating gRPC calls with entities and +// sharding. + +syntax = "proto3"; + +import "google/protobuf/descriptor.proto"; + +package cloudstate; + +option java_package = "io.cloudstate"; +option go_package = "github.com/cloudstateio/go-support/cloudstate/;cloudstate"; + +extend google.protobuf.FieldOptions { + bool entity_key = 50002; +} diff --git a/protobuf/proto/cloudstate/eventing.proto b/protobuf/proto/cloudstate/eventing.proto new file mode 100644 index 0000000..4ca2c48 --- /dev/null +++ b/protobuf/proto/cloudstate/eventing.proto @@ -0,0 +1,35 @@ +// Copyright 2019 Lightbend Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Extension for specifying which topics a gRPC endpoint should be connected +// to, in order to facilitate consuming and producing events from a message broker. 
+ +syntax = "proto3"; + +import "google/protobuf/descriptor.proto"; + +package cloudstate; + +option java_package = "io.cloudstate"; +option java_multiple_files = true; +option java_outer_classname = "EventsProto"; + +message Eventing { + string in = 1; + string out = 2; // Special value "discard" means do not publish +} + +extend google.protobuf.MethodOptions { + Eventing eventing = 50003; +} diff --git a/protobuf/proto/cloudstate/test/functiondemo/functiondemo.proto b/protobuf/proto/cloudstate/test/functiondemo/functiondemo.proto new file mode 100644 index 0000000..6debda5 --- /dev/null +++ b/protobuf/proto/cloudstate/test/functiondemo/functiondemo.proto @@ -0,0 +1,36 @@ +syntax = "proto3"; +import "google/api/annotations.proto"; +import "google/api/http.proto"; + +package com.example.functiondemo; + +message FunctionRequest{ + string foo = 1; +} + +message FunctionResponse{ + string bar = 1; +} + +message AddToSum{ + float quantity = 1; +} +message SumTotal{ + float total = 1; +} + +service FunctionDemo{ + rpc ReverseString(FunctionRequest) returns (FunctionResponse) { + option (google.api.http) = { + post: "/function_example/reverse_string", + body: "*", + }; + } + + rpc ReverseStrings(stream FunctionRequest) returns (stream FunctionResponse) { + } + rpc SumStream(stream AddToSum) returns (SumTotal) { + } + rpc SillyLetterStream(FunctionRequest) returns (stream FunctionResponse) { + } +} \ No newline at end of file diff --git a/protobuf/proto/cloudstate/test/functiondemo/functiondemo2.proto b/protobuf/proto/cloudstate/test/functiondemo/functiondemo2.proto new file mode 100644 index 0000000..5d64a20 --- /dev/null +++ b/protobuf/proto/cloudstate/test/functiondemo/functiondemo2.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; +import "google/api/annotations.proto"; +import "google/api/http.proto"; + +package com.example.functiondemo2; + +message FunctionRequest2{ + string foo = 1; +} + +message FunctionResponse2{ + string bar = 1; +} + +service FunctionDemo2{ + rpc 
ReverseString2(FunctionRequest2) returns (FunctionResponse2) { + option (google.api.http) = { + post: "/function_example2/reverse_string2", + body: "*", + }; + } +} \ No newline at end of file diff --git a/protobuf/proto/cloudstate/test/shoppingcart/persistence/domain.proto b/protobuf/proto/cloudstate/test/shoppingcart/persistence/domain.proto new file mode 100644 index 0000000..c827b80 --- /dev/null +++ b/protobuf/proto/cloudstate/test/shoppingcart/persistence/domain.proto @@ -0,0 +1,27 @@ +// These are the messages that get persisted - the events, plus the current state (Cart) for snapshots. +syntax = "proto3"; + +package com.example.shoppingcart.persistence; + +option go_package = "persistence"; + +message LineItem { + string productId = 1; + string name = 2; + int32 quantity = 3; +} + +// The item added event. +message ItemAdded { + LineItem item = 1; +} + +// The item removed event. +message ItemRemoved { + string productId = 1; +} + +// The shopping cart state. +message Cart { + repeated LineItem items = 1; +} diff --git a/protobuf/proto/cloudstate/test/shoppingcart/shoppingcart.proto b/protobuf/proto/cloudstate/test/shoppingcart/shoppingcart.proto new file mode 100644 index 0000000..27e1baf --- /dev/null +++ b/protobuf/proto/cloudstate/test/shoppingcart/shoppingcart.proto @@ -0,0 +1,63 @@ +// This is the public API offered by the shopping cart entity. 
+syntax = "proto3"; + +import "google/protobuf/empty.proto"; +import "cloudstate/entity_key.proto"; +import "cloudstate/eventing.proto"; +import "google/api/annotations.proto"; +import "google/api/http.proto"; +import "google/api/httpbody.proto"; + +package com.example.shoppingcart; + +option go_package = "tck/shoppingcart"; + +message AddLineItem { + string user_id = 1 [(.cloudstate.entity_key) = true]; + string product_id = 2; + string name = 3; + int32 quantity = 4; +} + +message RemoveLineItem { + string user_id = 1 [(.cloudstate.entity_key) = true]; + string product_id = 2; +} + +message GetShoppingCart { + string user_id = 1 [(.cloudstate.entity_key) = true]; +} + +message LineItem { + string product_id = 1; + string name = 2; + int32 quantity = 3; +} + +message Cart { + repeated LineItem items = 1; +} + +service ShoppingCart { + rpc AddItem(AddLineItem) returns (google.protobuf.Empty) { + option (google.api.http) = { + post: "/cart/{user_id}/items/add", + body: "*", + }; + option (.cloudstate.eventing).in = "items"; + } + + rpc RemoveItem(RemoveLineItem) returns (google.protobuf.Empty) { + option (google.api.http).post = "/cart/{user_id}/items/{product_id}/remove"; + } + + rpc GetCart(GetShoppingCart) returns (Cart) { + option (google.api.http) = { + get: "/carts/{user_id}", + additional_bindings: { + get: "/carts/{user_id}/items", + response_body: "items" + } + }; + } +} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..bb69c0a --- /dev/null +++ b/requirements.txt @@ -0,0 +1,9 @@ +protobuf>=3.11.3 +attrs>=19.3.0 +google-api>=0.1.12 +googleapis-common-protos >= 1.51.0 +grpcio>=1.28.1 +grpcio-tools>=1.28.1 +pytest>=5.4.2 +six>=1.14.0 +grpcio-reflection>=1.28.1 diff --git a/scripts/compile-protobuf.sh b/scripts/compile-protobuf.sh new file mode 100755 index 0000000..3a80be4 --- /dev/null +++ b/scripts/compile-protobuf.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash + +# this script is not needed if you use the setuptools installer; 
protobuf installation +# to pythonpath will occur if you pip install. + +set -o nounset +set -o errexit +set -o pipefail + +# follow the basic steps here: https://grpc.io/docs/tutorials/basic/python/ +python3 -m grpc_tools.protoc -Iprotobuf/protocol --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/protocol/cloudstate/entity.proto +python3 -m grpc_tools.protoc -Iprotobuf/protocol -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/protocol/cloudstate/crdt.proto + +python3 -m grpc_tools.protoc -Iprotobuf/protocol --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/protocol/cloudstate/event_sourced.proto +python3 -m grpc_tools.protoc -Iprotobuf/protocol -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/protocol/cloudstate/function.proto +python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/frontend/cloudstate/entity_key.proto +python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/frontend/cloudstate/eventing.proto + +python3 -m grpc_tools.protoc -Iprotobuf/example -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/example/shoppingcart/shoppingcart.proto +python3 -m grpc_tools.protoc -Iprotobuf/example -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/example/shoppingcart/persistence/domain.proto +python3 -m grpc_tools.protoc -Iprotobuf/example -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/example/functiondemo/functiondemo.proto +python3 -m grpc_tools.protoc -Iprotobuf/example -Iprotobuf/frontend --python_out=${1:-.} --grpc_python_out=${1:-.} ./protobuf/example/functiondemo/functiondemo2.proto + +# optional +#python3 -m grpc_tools.protoc -Iprotobuf/ --python_out=. --grpc_python_out=. ./protobuf/proxy/grpc/reflection/v1alpha/reflection.proto +#python3 -m grpc_tools.protoc -Iprotobuf/ --python_out=. 
--grpc_python_out=. protobuf/frontend/google/api/annotations.proto +#python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=. --grpc_python_out=. protobuf/frontend/google/api/annotations.proto +#python3 -m grpc_tools.protoc -Iprotobuf/frontend --python_out=. --grpc_python_out=. protobuf/frontend/google/api/http.proto \ No newline at end of file diff --git a/scripts/fetch-cloudstate-pb.sh b/scripts/fetch-cloudstate-pb.sh new file mode 100755 index 0000000..bc31486 --- /dev/null +++ b/scripts/fetch-cloudstate-pb.sh @@ -0,0 +1,35 @@ +#!/usr/bin/env bash + +set -o nounset +set -o errexit +set -o pipefail + +function fetch() { + local path=$1 + local tag=$2 + mkdir -p protobuf/$(dirname $path) + curl -o protobuf/${path} https://raw.githubusercontent.com/cloudstateio/cloudstate/${tag}/protocols/${path} + #sed 's/^option java_package.*/option go_package = "${go_package}";/' protobuf/${path} +} + +tag=$1 + +# Cloudstate protocol +fetch "protocol/cloudstate/entity.proto" $tag +fetch "protocol/cloudstate/event_sourced.proto" $tag +fetch "protocol/cloudstate/function.proto" $tag +fetch "protocol/cloudstate/crdt.proto" $tag + +# TCK shopping cart example +fetch "example/shoppingcart/shoppingcart.proto" $tag +fetch "example/shoppingcart/persistence/domain.proto" $tag + +# Cloudstate frontend +fetch "frontend/cloudstate/entity_key.proto" $tag +fetch "frontend/cloudstate/eventing.proto" $tag + +# dependencies +#fetch "proxy/grpc/reflection/v1alpha/reflection.proto" $tag +fetch "frontend/google/api/annotations.proto" $tag +fetch "frontend/google/api/http.proto" $tag +fetch "frontend/google/api/httpbody.proto" $tag \ No newline at end of file diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..75a1dc5 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,33 @@ +[metadata] +url = https://cloudstate.io/ +author = Cloudstate +license = Apache +license_file = LICENSE +classifiers = + Intended Audience :: Developers + License :: OSI Approved :: Apache Software License + 
Operating System :: OS Independent + Programming Language :: Python + Programming Language :: Python :: 3.6 +project_urls = + Documentation = https://cloudstate.io/docs/user/lang/index.html + Source = https://github.com/cloudstateio/python-support + +[options] +python_requires = >=3.6 +packages = find: +include_package_data = true +zip_safe = false +install_requires = + protobuf == 3.11.3 + google-api == 0.1.12 + grpcio == 1.28.1 + grpcio-tools == 1.28.1 + attrs == 19.3.0 + googleapis-common-protos >= 1.51.0 + +[aliases] +test=pytest + +[tool:pytest] +python_files = cloudstate/test/**/test_*.py \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..0a2d85e --- /dev/null +++ b/setup.py @@ -0,0 +1,72 @@ +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" +import os + +from setuptools import setup, find_packages +import pathlib + +# Load version in cloudstate package. +from setuptools.command.build_py import build_py + +exec(open("cloudstate/version.py").read()) + +PROTOBUF_VERSION = "v0.5.1" + +version = __version__ # noqa +name = "cloudstate" + +print(f"package name: {name}, version: {version}", flush=True) + +proto_lib_roots = ["protobuf/lib"] +proto_roots = ["protobuf/proto"] + + +class FetchBuildProtosCommand(build_py): + """fetch libs and install the protocol buffer generated sources.""" + + def run(self): + os.system(f"scripts/fetch-cloudstate-pb.sh {PROTOBUF_VERSION}") + + for proto_root in proto_roots + proto_lib_roots: + for root, subdirs, files in os.walk(proto_root): + for file in [f for f in files if f.endswith(".proto")]: + file_path = pathlib.Path(root) / file + destination = "." 
+ print(f"compiling {file_path} to {destination}") + command = f"python -m grpc_tools.protoc {' '.join([' -I ' + i for i in proto_roots + proto_lib_roots])} --python_out={destination} --grpc_python_out={destination} {file_path}" + os.system(command) + + return super().run() + + +packages = find_packages(exclude=[]) + +print(f"packages: {packages}") +setup( + name=name, + version=version, + url="https://github.com/cloudstateio/python-support", + license="Apache 2.0", + description="Cloudstate Python Support Library", + packages=packages, + long_description=open("Description.md", "r").read(), + long_description_content_type="text/markdown", + zip_safe=False, + scripts=["scripts/compile-protobuf.sh", "scripts/fetch-cloudstate-pb.sh"], + install_requires=[ + "attrs>=19.3.0", + "google-api>=0.1.12", + "googleapis-common-protos >= 1.51.0", + "grpcio>=1.28.1", + "grpcio-tools>=1.28.1", + "protobuf>=3.11.3", + "pytest>=5.4.2", + "six>=1.14.0", + "grpcio-reflection>=1.28.1", + ], + cmdclass={ + "build_py": FetchBuildProtosCommand, + }, +) From 86a9c0c0e18330d833a121933a6732ecfb6c42f2 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Wed, 16 Sep 2020 21:06:45 -0600 Subject: [PATCH 04/11] docker proxy-aware unit test passing --- .travis.yml | 4 ++-- .../test/functiondemo/test_functiondemo.py | 17 +++++++++++++++-- requirements.txt | 1 - setup.py | 1 + 4 files changed, 18 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index db01170..d567e2e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,8 +9,8 @@ jobs: - stage: build install: - pip install -r requirements.txt - - pip install . -vvv - script: pytest + - pip install . 
+ script: pytest --import-mode importlib deploy: provider: pypi username: "__token__" diff --git a/cloudstate/test/functiondemo/test_functiondemo.py b/cloudstate/test/functiondemo/test_functiondemo.py index 6cc4dbb..92c89a6 100644 --- a/cloudstate/test/functiondemo/test_functiondemo.py +++ b/cloudstate/test/functiondemo/test_functiondemo.py @@ -4,6 +4,7 @@ """ import logging +import time import grpc import pytest @@ -26,6 +27,7 @@ def evaluate_functiondemo_server(host: str, port: int): logger.info(f"connecting on {server_hostport}") channel = grpc.insecure_channel(server_hostport) + logger.info("channel established.") stub = FunctionDemoStub(channel) request_oof = FunctionRequest(foo="oof") response = stub.ReverseString(request_oof) @@ -83,5 +85,16 @@ def evaluate_functiondemo_server(host: str, port: int): def test_functiondemo(): server_thread = run_test_server(port=8080) - evaluate_functiondemo_server("localhost", 8080) - server_thread.stop() + import docker + client = docker.from_env() + container = client.containers.run("cloudstateio/cloudstate-proxy-dev-mode", environment={"USER_FUNCTION_HOST":"127.0.0.1", "USER_FUNCTION_PORT":"8080"},detach=True, ports={'9000/tcp': 9000}, network="host") + logger.info(f"status {container.status}") + try: + time.sleep(15) + evaluate_functiondemo_server("127.0.0.1", 9000) + except Exception as e: + raise e + finally: + server_thread.stop(None) + logger.info(container.logs()) + container.stop() \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index bb69c0a..b322256 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,3 @@ -protobuf>=3.11.3 attrs>=19.3.0 google-api>=0.1.12 googleapis-common-protos >= 1.51.0 diff --git a/setup.py b/setup.py index 0a2d85e..0d65f21 100644 --- a/setup.py +++ b/setup.py @@ -65,6 +65,7 @@ def run(self): "pytest>=5.4.2", "six>=1.14.0", "grpcio-reflection>=1.28.1e", + "docker" ], cmdclass={ "build_py": FetchBuildProtosCommand, From 
6dafbbd8aad9a2104482cd716b22adb67bd69f1d Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Wed, 16 Sep 2020 21:16:10 -0600 Subject: [PATCH 05/11] add docker to travis --- .travis.yml | 5 +++-- .../test/shoppingcart/test_shoppingcart.py | 19 ++++++++++++++++--- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index d567e2e..a59f9e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,8 @@ language: python python: - "3.8" - +services: + - docker jobs: include: @@ -10,7 +11,7 @@ jobs: install: - pip install -r requirements.txt - pip install . - script: pytest --import-mode importlib + script: pytest --import-mode=importlib deploy: provider: pypi username: "__token__" diff --git a/cloudstate/test/shoppingcart/test_shoppingcart.py b/cloudstate/test/shoppingcart/test_shoppingcart.py index 576caac..fed3b29 100644 --- a/cloudstate/test/shoppingcart/test_shoppingcart.py +++ b/cloudstate/test/shoppingcart/test_shoppingcart.py @@ -1,3 +1,5 @@ +import time + import grpc @@ -31,6 +33,17 @@ def evaluate_shoppingcart_server(host: str, port: int): def test_shoppingcart(): - server_thread = run_test_server(port=8080) - evaluate_shoppingcart_server("localhost", 8080) - server_thread.stop() + server_thread = run_test_server(port=8081) + import docker + client = docker.from_env() + container = client.containers.run("cloudstateio/cloudstate-proxy-dev-mode", environment={"USER_FUNCTION_HOST":"127.0.0.1", "USER_FUNCTION_PORT":"8081"},detach=True, ports={'9000/tcp': 9000}, network="host") + logger.info(f"status {container.status}") + try: + time.sleep(15) + evaluate_shoppingcart_server("127.0.0.1", 9000) + except Exception as e: + raise e + finally: + server_thread.stop(None) + logger.info(container.logs()) + container.stop() \ No newline at end of file From 2c4e4936ecbcca0f884f920e2846fd47f2ac7bd6 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Wed, 16 Sep 2020 22:28:50 -0600 Subject: [PATCH 06/11] function return, pull image --- .travis.yml | 4 
++++ cloudstate/test/functiondemo/function_definition.py | 2 +- cloudstate/test/functiondemo/test_functiondemo.py | 1 + cloudstate/test/shoppingcart/shopping_cart_entity.py | 1 - cloudstate/test/shoppingcart/test_shoppingcart.py | 1 + 5 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index a59f9e7..455c951 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,10 @@ python: - "3.8" services: - docker + +#before_install: +# - docker pull cloudstateio/cloudstate-proxy-dev-mode:latest + jobs: include: diff --git a/cloudstate/test/functiondemo/function_definition.py b/cloudstate/test/functiondemo/function_definition.py index 4f51b39..7cda059 100644 --- a/cloudstate/test/functiondemo/function_definition.py +++ b/cloudstate/test/functiondemo/function_definition.py @@ -84,4 +84,4 @@ def reverse_string2( ctx.fail("Intentionally failed.") else: - return FunctionResponse(bar=arg.foo[::-1] + "!") + return FunctionResponse2(bar=arg.foo[::-1] + "!") diff --git a/cloudstate/test/functiondemo/test_functiondemo.py b/cloudstate/test/functiondemo/test_functiondemo.py index 92c89a6..8b42af3 100644 --- a/cloudstate/test/functiondemo/test_functiondemo.py +++ b/cloudstate/test/functiondemo/test_functiondemo.py @@ -87,6 +87,7 @@ def test_functiondemo(): server_thread = run_test_server(port=8080) import docker client = docker.from_env() + client.images.pull('cloudstateio/cloudstate-proxy-dev-mode:latest') container = client.containers.run("cloudstateio/cloudstate-proxy-dev-mode", environment={"USER_FUNCTION_HOST":"127.0.0.1", "USER_FUNCTION_PORT":"8080"},detach=True, ports={'9000/tcp': 9000}, network="host") logger.info(f"status {container.status}") try: diff --git a/cloudstate/test/shoppingcart/shopping_cart_entity.py b/cloudstate/test/shoppingcart/shopping_cart_entity.py index a7f058e..55d1723 100644 --- a/cloudstate/test/shoppingcart/shopping_cart_entity.py +++ b/cloudstate/test/shoppingcart/shopping_cart_entity.py @@ -91,7 +91,6 @@ def 
item_removed(state: ShoppingCartState, event: ItemRemoved): def get_cart( state: ShoppingCartState, item: GetShoppingCart, ctx: EventSourcedCommandContext ): - print(f"get shopping cart: {item}") cart = Cart() cart.items.extend(state.cart.values()) diff --git a/cloudstate/test/shoppingcart/test_shoppingcart.py b/cloudstate/test/shoppingcart/test_shoppingcart.py index fed3b29..501e5f2 100644 --- a/cloudstate/test/shoppingcart/test_shoppingcart.py +++ b/cloudstate/test/shoppingcart/test_shoppingcart.py @@ -36,6 +36,7 @@ def test_shoppingcart(): server_thread = run_test_server(port=8081) import docker client = docker.from_env() + # client.images.pull('cloudstateio/cloudstate-proxy-dev-mode:latest') container = client.containers.run("cloudstateio/cloudstate-proxy-dev-mode", environment={"USER_FUNCTION_HOST":"127.0.0.1", "USER_FUNCTION_PORT":"8081"},detach=True, ports={'9000/tcp': 9000}, network="host") logger.info(f"status {container.status}") try: From 38192d37ac7f07638d51d775f16501639a3d8b22 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Wed, 16 Sep 2020 22:37:02 -0600 Subject: [PATCH 07/11] final cleanup --- cloudstate/eventsourced_servicer.py | 3 +-- cloudstate/function_servicer.py | 3 +-- cloudstate/stateless_function_entity.py | 2 +- .../test/functiondemo/test_functiondemo.py | 13 +++++++--- cloudstate/test/run_test_server.py | 3 --- cloudstate/test/shoppingcart/shopping_cart.py | 1 - .../test/shoppingcart/shopping_cart_entity.py | 1 - .../test/shoppingcart/test_shoppingcart.py | 25 +++++++++++++------ cloudstate/test/tck_services.py | 2 +- 9 files changed, 31 insertions(+), 22 deletions(-) diff --git a/cloudstate/eventsourced_servicer.py b/cloudstate/eventsourced_servicer.py index 55d0428..b431e53 100644 --- a/cloudstate/eventsourced_servicer.py +++ b/cloudstate/eventsourced_servicer.py @@ -7,9 +7,7 @@ from pprint import pprint from typing import List -from cloudstate.utils.payload_utils import get_payload, pack from google.protobuf import symbol_database as 
_symbol_database -from google.protobuf.any_pb2 import Any from cloudstate.entity_pb2 import Command from cloudstate.event_sourced_context import ( @@ -26,6 +24,7 @@ EventSourcedStreamOut, ) from cloudstate.event_sourced_pb2_grpc import EventSourcedServicer +from cloudstate.utils.payload_utils import get_payload, pack _sym_db = _symbol_database.Default() diff --git a/cloudstate/function_servicer.py b/cloudstate/function_servicer.py index 66640c2..de9b941 100644 --- a/cloudstate/function_servicer.py +++ b/cloudstate/function_servicer.py @@ -7,9 +7,7 @@ from typing import List import grpc -from cloudstate.utils.payload_utils import get_payload from google.protobuf import symbol_database as _symbol_database -from google.protobuf.any_pb2 import Any from grpc._server import _RequestIterator from cloudstate.entity_pb2 import ClientAction @@ -20,6 +18,7 @@ StatelessFunction, StatelessFunctionHandler, ) +from cloudstate.utils.payload_utils import get_payload _sym_db = _symbol_database.Default() diff --git a/cloudstate/stateless_function_entity.py b/cloudstate/stateless_function_entity.py index 087b79a..afb7225 100644 --- a/cloudstate/stateless_function_entity.py +++ b/cloudstate/stateless_function_entity.py @@ -4,6 +4,7 @@ """ import inspect +import logging from dataclasses import dataclass, field from typing import Callable, List, MutableMapping @@ -11,7 +12,6 @@ from cloudstate.function_pb2 import _STATELESSFUNCTION from cloudstate.stateless_function_context import StatelessFunctionContext -import logging @dataclass diff --git a/cloudstate/test/functiondemo/test_functiondemo.py b/cloudstate/test/functiondemo/test_functiondemo.py index 8b42af3..ae64bfb 100644 --- a/cloudstate/test/functiondemo/test_functiondemo.py +++ b/cloudstate/test/functiondemo/test_functiondemo.py @@ -86,9 +86,16 @@ def evaluate_functiondemo_server(host: str, port: int): def test_functiondemo(): server_thread = run_test_server(port=8080) import docker + client = docker.from_env() - 
client.images.pull('cloudstateio/cloudstate-proxy-dev-mode:latest') - container = client.containers.run("cloudstateio/cloudstate-proxy-dev-mode", environment={"USER_FUNCTION_HOST":"127.0.0.1", "USER_FUNCTION_PORT":"8080"},detach=True, ports={'9000/tcp': 9000}, network="host") + client.images.pull("cloudstateio/cloudstate-proxy-dev-mode:latest") + container = client.containers.run( + "cloudstateio/cloudstate-proxy-dev-mode", + environment={"USER_FUNCTION_HOST": "127.0.0.1", "USER_FUNCTION_PORT": "8080"}, + detach=True, + ports={"9000/tcp": 9000}, + network="host", + ) logger.info(f"status {container.status}") try: time.sleep(15) @@ -98,4 +105,4 @@ def test_functiondemo(): finally: server_thread.stop(None) logger.info(container.logs()) - container.stop() \ No newline at end of file + container.stop() diff --git a/cloudstate/test/run_test_server.py b/cloudstate/test/run_test_server.py index 11e28ba..0b39c78 100644 --- a/cloudstate/test/run_test_server.py +++ b/cloudstate/test/run_test_server.py @@ -4,9 +4,6 @@ """ import logging -import threading - -import pytest from cloudstate.cloudstate import CloudState from cloudstate.test.functiondemo.function_definition import definition, definition2 diff --git a/cloudstate/test/shoppingcart/shopping_cart.py b/cloudstate/test/shoppingcart/shopping_cart.py index 6c5cbad..4bc0cfa 100644 --- a/cloudstate/test/shoppingcart/shopping_cart.py +++ b/cloudstate/test/shoppingcart/shopping_cart.py @@ -4,7 +4,6 @@ """ from cloudstate.cloudstate import CloudState - from cloudstate.test.shoppingcart.shopping_cart_entity import ( entity as shopping_cart_entity, ) diff --git a/cloudstate/test/shoppingcart/shopping_cart_entity.py b/cloudstate/test/shoppingcart/shopping_cart_entity.py index 55d1723..e2cdf61 100644 --- a/cloudstate/test/shoppingcart/shopping_cart_entity.py +++ b/cloudstate/test/shoppingcart/shopping_cart_entity.py @@ -10,7 +10,6 @@ from cloudstate.event_sourced_context import EventSourcedCommandContext from 
cloudstate.event_sourced_entity import EventSourcedEntity - from cloudstate.test.shoppingcart.persistence.domain_pb2 import Cart as DomainCart from cloudstate.test.shoppingcart.persistence.domain_pb2 import ItemAdded, ItemRemoved from cloudstate.test.shoppingcart.persistence.domain_pb2 import ( diff --git a/cloudstate/test/shoppingcart/test_shoppingcart.py b/cloudstate/test/shoppingcart/test_shoppingcart.py index 501e5f2..e6817ea 100644 --- a/cloudstate/test/shoppingcart/test_shoppingcart.py +++ b/cloudstate/test/shoppingcart/test_shoppingcart.py @@ -1,14 +1,16 @@ -import time - -import grpc - +""" +Copyright 2020 Lightbend Inc. +Licensed under the Apache License, Version 2.0. +""" import logging +import time -from cloudstate.test.shoppingcart.shoppingcart_pb2 import GetShoppingCart, AddLineItem -from cloudstate.test.shoppingcart.shoppingcart_pb2_grpc import ShoppingCartStub +import grpc from cloudstate.test.run_test_server import run_test_server +from cloudstate.test.shoppingcart.shoppingcart_pb2 import AddLineItem, GetShoppingCart +from cloudstate.test.shoppingcart.shoppingcart_pb2_grpc import ShoppingCartStub logger = logging.getLogger() @@ -35,9 +37,16 @@ def evaluate_shoppingcart_server(host: str, port: int): def test_shoppingcart(): server_thread = run_test_server(port=8081) import docker + client = docker.from_env() # client.images.pull('cloudstateio/cloudstate-proxy-dev-mode:latest') - container = client.containers.run("cloudstateio/cloudstate-proxy-dev-mode", environment={"USER_FUNCTION_HOST":"127.0.0.1", "USER_FUNCTION_PORT":"8081"},detach=True, ports={'9000/tcp': 9000}, network="host") + container = client.containers.run( + "cloudstateio/cloudstate-proxy-dev-mode", + environment={"USER_FUNCTION_HOST": "127.0.0.1", "USER_FUNCTION_PORT": "8081"}, + detach=True, + ports={"9000/tcp": 9000}, + network="host", + ) logger.info(f"status {container.status}") try: time.sleep(15) @@ -47,4 +56,4 @@ def test_shoppingcart(): finally: server_thread.stop(None) 
logger.info(container.logs()) - container.stop() \ No newline at end of file + container.stop() diff --git a/cloudstate/test/tck_services.py b/cloudstate/test/tck_services.py index 4c1f5d3..8c69e98 100644 --- a/cloudstate/test/tck_services.py +++ b/cloudstate/test/tck_services.py @@ -6,8 +6,8 @@ import sys from logging import getLogger -from cloudstate.test.run_test_server import run_test_server from cloudstate.test.functiondemo.test_functiondemo import evaluate_functiondemo_server +from cloudstate.test.run_test_server import run_test_server from cloudstate.test.shoppingcart.test_shoppingcart import evaluate_shoppingcart_server logger = getLogger() From 054c85d537ca6725bdbdd41b8dd13cd757998868 Mon Sep 17 00:00:00 2001 From: Adriano Santos Date: Tue, 22 Sep 2020 11:14:03 -0300 Subject: [PATCH 08/11] Update cloudstate/cloudstate.py Co-authored-by: Dalmo Cirne --- cloudstate/cloudstate.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cloudstate/cloudstate.py b/cloudstate/cloudstate.py index 5d4d998..73d9cc0 100644 --- a/cloudstate/cloudstate.py +++ b/cloudstate/cloudstate.py @@ -52,7 +52,7 @@ def port(self, port: str): def max_workers(self, workers: Optional[int] = multiprocessing.cpu_count()): """Set the gRPC Server number of Workers. - Default is equal than number of CPU Cores in the machine. + Default is equal to the number of CPU Cores in the machine. 
""" self.__workers = workers return self From b570a23675ceed48f2417d7200eb86a75ca1e048 Mon Sep 17 00:00:00 2001 From: Adriano Santos Date: Tue, 22 Sep 2020 11:14:52 -0300 Subject: [PATCH 09/11] Update Dockerfile Co-authored-by: Dalmo Cirne --- Dockerfile | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 74abe9e..623bf01 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,9 @@ FROM python:3.8.0-slim WORKDIR /python-support -RUN apt-get update && apt-get install -y curl +RUN apt-get update && \ + apt-get -y upgrade && \ + apt-get install -y curl --no-install-recommends COPY ./requirements.txt /python-support/requirements.txt RUN pip install -r /python-support/requirements.txt COPY ./scripts /python-support/scripts @@ -17,4 +19,4 @@ RUN pip install . -vvv WORKDIR / ENTRYPOINT ["python", "-m", "cloudstate.test.tck_services"] -EXPOSE 8080 \ No newline at end of file +EXPOSE 8080 From 5f70a4760156726675f1ac9e5a4ac54e194584d3 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Wed, 23 Sep 2020 11:36:19 -0600 Subject: [PATCH 10/11] changes to extended_tck.sh for clean shutdown and image version lock --- extended_tck.sh | 41 +++++++++++++++++++++++++++++------------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/extended_tck.sh b/extended_tck.sh index 307156e..11ef688 100755 --- a/extended_tck.sh +++ b/extended_tck.sh @@ -5,11 +5,32 @@ RUN_SUFFIX=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 10 | head -n 1) PROXY_NAME=cloudstate-proxy-$RUN_SUFFIX USER_FUNCTION_NAME=cloudstate-function-$RUN_SUFFIX FUNCTION_CLIENT_NAME=cloudstate-function-client-$RUN_SUFFIX + TCK_NAME=cloudstate-tck-$RUN_SUFFIX +PYTHON_TCK_NAME=cloudstate-python-tck-dev:$RUN_SUFFIX + +TCK_IMAGE=cloudstateio/cloudstate-tck:0.5.1 +PROXY_IMAGE=cloudstateio/cloudstate-proxy-dev-mode + +echo using TCK image $TCK_IMAGE +echo using proxy image $PROXY_IMAGE + NETWORK_NAME=tck-network-$RUN_SUFFIX +finally() { + docker rm -f $PROXY_NAME + docker rm 
-f $USER_FUNCTION_NAME + docker rm -f $FUNCTION_CLIENT_NAME + docker rm -f $PYTHON_TCK_NAME + docker rmi $PYTHON_TCK_NAME + + docker network rm $NETWORK_NAME +} +trap finally EXIT +set -x + # fresh docker build -docker build -t dev-cloudstate-tck:local ./ +docker build -t $PYTHON_TCK_NAME ./ docker network create $NETWORK_NAME @@ -17,9 +38,9 @@ docker network create $NETWORK_NAME docker run -d --network $NETWORK_NAME --name $PROXY_NAME -p 9000:9000 \ -e USER_FUNCTION_HOST=$TCK_NAME \ -e USER_FUNCTION_PORT=8090 \ - cloudstateio/cloudstate-proxy-dev-mode + $PROXY_IMAGE sleep 10 -docker run -d --network $NETWORK_NAME --name $USER_FUNCTION_NAME -p 8080:8080 dev-cloudstate-tck:local \ +docker run -d --network $NETWORK_NAME --name $USER_FUNCTION_NAME -p 8080:8080 $PYTHON_TCK_NAME \ server \ shoppingcart sleep 10 @@ -27,7 +48,8 @@ docker run --rm --network $NETWORK_NAME --name $TCK_NAME -p 8090:8090 \ -e TCK_HOST=0.0.0.0 \ -e TCK_PROXY_HOST=$PROXY_NAME \ -e TCK_FRONTEND_HOST=$USER_FUNCTION_NAME \ - cloudstateio/cloudstate-tck + $TCK_IMAGE + status=$? 
echo "Removing docker containers" @@ -35,7 +57,7 @@ docker rm -f $PROXY_NAME docker rm -f $USER_FUNCTION_NAME # secondary integration tests for stateless function: -docker run -d --network $NETWORK_NAME --name $USER_FUNCTION_NAME -p 8080:8080 dev-cloudstate-tck:local \ +docker run -d --network $NETWORK_NAME --name $USER_FUNCTION_NAME -p 8080:8080 $PYTHON_TCK_NAME \ server \ functiondemo \ shoppingcart @@ -43,9 +65,9 @@ sleep 10 docker run -d --network $NETWORK_NAME --name $PROXY_NAME -p 9000:9000 \ -e USER_FUNCTION_HOST=$USER_FUNCTION_NAME \ -e USER_FUNCTION_PORT=8080 \ - cloudstateio/cloudstate-proxy-dev-mode + $PROXY_IMAGE sleep 10 -docker run --network $NETWORK_NAME --name $FUNCTION_CLIENT_NAME dev-cloudstate-tck:local \ +docker run --network $NETWORK_NAME --name $FUNCTION_CLIENT_NAME $PYTHON_TCK_NAME \ client \ server_host $PROXY_NAME \ functiondemo \ @@ -53,11 +75,6 @@ docker run --network $NETWORK_NAME --name $FUNCTION_CLIENT_NAME dev-cloudstate-t status1=$? -docker rm -f $PROXY_NAME -docker rm -f $USER_FUNCTION_NAME - -docker network rm $NETWORK_NAME - RETURNSTATUS=1 if [ "${status1}" == 0 ] && [ "${status}" == 0 ]; then RETURNSTATUS=0 From f705500df1a069a56908e27b2a427e03f8c149d2 Mon Sep 17 00:00:00 2001 From: GratefulTony Date: Wed, 23 Sep 2020 11:47:53 -0600 Subject: [PATCH 11/11] changes to extended_tck.sh for clean shutdown and image version lock --- extended_tck.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/extended_tck.sh b/extended_tck.sh index 11ef688..da0b6aa 100755 --- a/extended_tck.sh +++ b/extended_tck.sh @@ -21,7 +21,6 @@ finally() { docker rm -f $PROXY_NAME docker rm -f $USER_FUNCTION_NAME docker rm -f $FUNCTION_CLIENT_NAME - docker rm -f $PYTHON_TCK_NAME docker rmi $PYTHON_TCK_NAME docker network rm $NETWORK_NAME