diff --git a/antora.yml b/antora.yml
index 47472881..d826a0b6 100644
--- a/antora.yml
+++ b/antora.yml
@@ -8,7 +8,7 @@ asciidoc:
attributes:
product_name: OpenShift Serverless Logic
context: OpenShift Serverless Logic
- kogito_version_redhat: 1.27.0.Final-redhat-00005
+ kogito_version_redhat: 1.30.0.Final-redhat-00001
quarkus_platform: com.redhat.quarkus.platform
kogito_sw_ga: >-
org.kie.kogito:kogito-quarkus-serverless-workflow
@@ -17,15 +17,18 @@ asciidoc:
maven_min_version: 3.8.1
graalvm_min_version: 21.3.0
spec_version: 0.8
- vscode_version: 1.46.0
- kn_cli_version: 0.21.3
+ vscode_version: 1.66.0
+ kn_cli_version: 0.25.0
kie_tools_node_min_version: 16.13.2
kie_tools_pnpm_min_version: 7.0.0
kie_tools_golang_min_version: 1.19
+ docker_min_version: 20.10.7
+ docker_compose_min_version: 1.27.2
kogito_devservices_imagename: registry.redhat.io/openshift-serverless-1-tech-preview/logic-data-index-ephemeral-rhel8:1.24.0-11
kogito_examples_repository_url: 'https://github.com/kiegroup/kogito-examples'
- kogito_sw_examples_url: https://github.com/kiegroup/kogito-examples/tree/1.27.x/serverless-workflow-examples
+ kogito_sw_examples_url: https://github.com/kiegroup/kogito-examples/tree/1.30.x/serverless-workflow-examples
kogito_examples_url: 'https://github.com/kiegroup/kogito-examples.git'
+ kogito_apps_url: https://github.com/kiegroup/kogito-apps/tree/main
quarkus_cli_url: 'https://quarkus.io/guides/cli-tooling'
spec_website_url: 'https://serverlessworkflow.io/'
spec_doc_url: >-
@@ -48,6 +51,7 @@ asciidoc:
java_install_url: 'https://www.java.com/en/download/help/download_options.html'
maven_install_url: 'https://maven.apache.org/install.html'
docker_install_url: 'https://docs.docker.com/engine/install/'
+ docker_compose_install_url: https://docs.docker.com/compose/install/
podman_install_url: 'https://docs.podman.io/en/latest/'
kubectl_install_url: 'https://kubernetes.io/docs/tasks/tools/install-kubectl'
kn_cli_install_url: 'https://github.com/knative/client/blob/main/docs/README.md#installing-kn'
diff --git a/modules/ROOT/nav.adoc b/modules/ROOT/nav.adoc
index d3b773b8..d16b2170 100644
--- a/modules/ROOT/nav.adoc
+++ b/modules/ROOT/nav.adoc
@@ -14,6 +14,7 @@
//**** xref:serverless-logic:core/accessing-workflow-metainformation-in-runtime.adoc[Accessing workflow metainformation in runtime]
**** xref:serverless-logic:core/defining-an-input-schema-for-workflows.adoc[Defining an input schema for your workflows]
**** xref:serverless-logic:core/custom-functions-support.adoc[Custom functions for your {context} service]
+**** xref:serverless-logic:core/timeouts-support.adoc[Timeouts in {context}]
*** Tooling
**** xref:serverless-logic:tooling/serverless-workflow-editor/swf-editor-overview.adoc[Serverless Workflow editor]
***** xref:serverless-logic:tooling/serverless-workflow-editor/swf-editor-vscode-extension.adoc[VS Code extension for Serverless Workflow editor]
diff --git a/modules/serverless-logic/assets/images/core/callback-state-timeouts.svg b/modules/serverless-logic/assets/images/core/callback-state-timeouts.svg
new file mode 100644
index 00000000..7730f45c
--- /dev/null
+++ b/modules/serverless-logic/assets/images/core/callback-state-timeouts.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/modules/serverless-logic/assets/images/core/error_handling.png b/modules/serverless-logic/assets/images/core/error_handling.png
index d58a20fc..51a69446 100644
Binary files a/modules/serverless-logic/assets/images/core/error_handling.png and b/modules/serverless-logic/assets/images/core/error_handling.png differ
diff --git a/modules/serverless-logic/assets/images/core/expression_diagram.png b/modules/serverless-logic/assets/images/core/expression_diagram.png
index d849dca4..91640fe9 100644
Binary files a/modules/serverless-logic/assets/images/core/expression_diagram.png and b/modules/serverless-logic/assets/images/core/expression_diagram.png differ
diff --git a/modules/serverless-logic/assets/images/core/jobs-service-knative-architecture.png b/modules/serverless-logic/assets/images/core/jobs-service-knative-architecture.png
new file mode 100644
index 00000000..bfb10141
Binary files /dev/null and b/modules/serverless-logic/assets/images/core/jobs-service-knative-architecture.png differ
diff --git a/modules/serverless-logic/assets/images/core/switch-state-timeouts.svg b/modules/serverless-logic/assets/images/core/switch-state-timeouts.svg
new file mode 100644
index 00000000..6e69244d
--- /dev/null
+++ b/modules/serverless-logic/assets/images/core/switch-state-timeouts.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/modules/serverless-logic/assets/images/core/switch_condition.png b/modules/serverless-logic/assets/images/core/switch_condition.png
index d2503074..81a6706d 100644
Binary files a/modules/serverless-logic/assets/images/core/switch_condition.png and b/modules/serverless-logic/assets/images/core/switch_condition.png differ
diff --git a/modules/serverless-logic/assets/images/core/timeout-switch-wokflow-ui.png b/modules/serverless-logic/assets/images/core/timeout-switch-wokflow-ui.png
new file mode 100644
index 00000000..2ed8bdd2
Binary files /dev/null and b/modules/serverless-logic/assets/images/core/timeout-switch-wokflow-ui.png differ
diff --git a/modules/serverless-logic/assets/images/getting-started/hello-world-workflow.png b/modules/serverless-logic/assets/images/getting-started/hello-world-workflow.png
index 2b423fee..e31fd3ca 100644
Binary files a/modules/serverless-logic/assets/images/getting-started/hello-world-workflow.png and b/modules/serverless-logic/assets/images/getting-started/hello-world-workflow.png differ
diff --git a/modules/serverless-logic/assets/images/security/orchestrating-third-party-services-with-oauth2/currency-exchange-workflow-diagram.png b/modules/serverless-logic/assets/images/security/orchestrating-third-party-services-with-oauth2/currency-exchange-workflow-diagram.png
index 65004203..fbd83595 100644
Binary files a/modules/serverless-logic/assets/images/security/orchestrating-third-party-services-with-oauth2/currency-exchange-workflow-diagram.png and b/modules/serverless-logic/assets/images/security/orchestrating-third-party-services-with-oauth2/currency-exchange-workflow-diagram.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-fail-alert.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-fail-alert.png
new file mode 100644
index 00000000..60346651
Binary files /dev/null and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-fail-alert.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-success-alert.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-success-alert.png
new file mode 100644
index 00000000..2c794004
Binary files /dev/null and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-success-alert.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-fail-alert.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-fail-alert.png
deleted file mode 100644
index 31920bb9..00000000
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-fail-alert.png and /dev/null differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-success-alert.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-success-alert.png
deleted file mode 100644
index ffba98b0..00000000
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-success-alert.png and /dev/null differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-values.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-values.png
deleted file mode 100644
index de8598fa..00000000
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-values.png and /dev/null differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events-custom-form.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events-custom-form.png
new file mode 100644
index 00000000..7e07c59d
Binary files /dev/null and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events-custom-form.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events.png
index 65dd4786..2a6cc47f 100644
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events.png and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-details-page.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-details-page.png
index 4b6a0470..7fa13d7a 100644
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-details-page.png and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-details-page.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-diagram-panel.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-diagram-panel.png
deleted file mode 100644
index fca20953..00000000
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-diagram-panel.png and /dev/null differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-instance-completed.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-instance-completed.png
index 2c070624..3400b684 100644
Binary files a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-instance-completed.png and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-instance-completed.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-mermaid-diagram-panel.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-mermaid-diagram-panel.png
new file mode 100644
index 00000000..bd37035b
Binary files /dev/null and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-mermaid-diagram-panel.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-stunner-diagram-panel.png b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-stunner-diagram-panel.png
new file mode 100644
index 00000000..1e3a5e56
Binary files /dev/null and b/modules/serverless-logic/assets/images/tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-stunner-diagram-panel.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-example.png b/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-example.png
index c91f9f2b..1acf3196 100644
Binary files a/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-example.png and b/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-example.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-command-palette.png b/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-command-palette.png
index 16ef4077..f81e4599 100644
Binary files a/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-command-palette.png and b/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-command-palette.png differ
diff --git a/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-extension-page.png b/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-extension-page.png
index d21fe325..90c25a82 100644
Binary files a/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-extension-page.png and b/modules/serverless-logic/assets/images/tooling/serverless-workflow-editor/swf-editor-vscode-extension-page.png differ
diff --git a/modules/serverless-logic/assets/images/use-cases/orchestration-based-saga-pattern/order-fulfillment-saga-workflow.png b/modules/serverless-logic/assets/images/use-cases/orchestration-based-saga-pattern/order-fulfillment-saga-workflow.png
index 33a7a571..4ae88ef4 100644
Binary files a/modules/serverless-logic/assets/images/use-cases/orchestration-based-saga-pattern/order-fulfillment-saga-workflow.png and b/modules/serverless-logic/assets/images/use-cases/orchestration-based-saga-pattern/order-fulfillment-saga-workflow.png differ
diff --git a/modules/serverless-logic/pages/cloud/build-workflow-image-with-quarkus-cli.adoc b/modules/serverless-logic/pages/cloud/build-workflow-image-with-quarkus-cli.adoc
index 353c9b46..f85da74f 100644
--- a/modules/serverless-logic/pages/cloud/build-workflow-image-with-quarkus-cli.adoc
+++ b/modules/serverless-logic/pages/cloud/build-workflow-image-with-quarkus-cli.adoc
@@ -33,7 +33,7 @@ NOTE: You can skip the following procedure if you already have a workflow applic
.Clone an example application
[source,shell,subs="attributes+"]
----
-git clone --branch main {kogito_sw_examples_git_repo_url}
+git clone --branch main {kogito_sw_examples_git_repo_url}
cd kogito-examples/serverless-workflow-examples/serverless-workflow-greeting-quarkus
----
@@ -67,7 +67,7 @@ After installing the required tooling, you can start building your workflow appl
.Prerequisites
* You have created a Quarkus project.
-* Quarkus CLI is installed.
+* Quarkus CLI is installed.
For more information about installing the Quarkus CLI, see link:https://quarkus.io/guides/cli-tooling#installing-the-cli[Installing the Quarkus CLI].
.Procedure
@@ -157,7 +157,7 @@ When it comes to workflows, a small startup footprint is expected, which can be
.Prerequisites
* You have created a Quarkus project.
-* Quarkus CLI is installed.
+* Quarkus CLI is installed.
For more information about installing the Quarkus CLI, see link:{quarkus_cli_url}[Installing the Quarkus CLI].
.Procedure
@@ -242,7 +242,7 @@ Example request::
+
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "John", "language": "English"}}' http://localhost:8080/jsongreet
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://localhost:8080/jsongreet
----
Example response::
+
diff --git a/modules/serverless-logic/pages/cloud/deploying-on-minikube.adoc b/modules/serverless-logic/pages/cloud/deploying-on-minikube.adoc
index f62f8904..4d3bb3dd 100644
--- a/modules/serverless-logic/pages/cloud/deploying-on-minikube.adoc
+++ b/modules/serverless-logic/pages/cloud/deploying-on-minikube.adoc
@@ -169,7 +169,7 @@ For more information, see link:{knative_issue_url}[How to use locally built dock
In that case, use the `-Dquarkus.container-image.registry=some_of_the_values_above` property to enable Knative fetch the container images from Minikube Docker Daemon.
-If you do not use the values, you might need to set the `imagePullPolicy` to `Never` or `IfNotPresent`, otherwise, Minikube pulls the images from a remote registry.
+If you do not use the values, you might need to set the `imagePullPolicy` to `Never` or `IfNotPresent`, otherwise, Minikube pulls the images from a remote registry.
This behavior can be avoided by tagging the image using previously listed domains.
====
--
@@ -294,7 +294,7 @@ http://hello.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io
.Example request
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "John", "language": "English"}}' http://hello.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io/jsongreet
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://hello.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io/jsongreet
----
.Example response
@@ -473,7 +473,7 @@ greeting-quarkus-kubectl http://greeting-quarkus-kubectl.serverless-workflow-g
.Access workflow application
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "John", "language": "English"}}' http://greeting-quarkus-kubectl.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io/jsongreet
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://greeting-quarkus-kubectl.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io/jsongreet
----
--
@@ -555,7 +555,7 @@ greeting-quarkus-cli http://greeting-quarkus-cli.serverless-workflow-greeting-
.Access your workflow application
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "John", "language": "English"}}' http://greeting-quarkus-cli.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io/jsongreet.37.sslip.io/jsongreet
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://greeting-quarkus-cli.serverless-workflow-greeting-quarkus.10.103.94.37.sslip.io/jsongreet
----
--
diff --git a/modules/serverless-logic/pages/core/custom-functions-support.adoc b/modules/serverless-logic/pages/core/custom-functions-support.adoc
index d226e565..0e687abe 100644
--- a/modules/serverless-logic/pages/core/custom-functions-support.adoc
+++ b/modules/serverless-logic/pages/core/custom-functions-support.adoc
@@ -94,7 +94,7 @@ The following example shows the declaration of a `java` function:
=== Function Arguments
-Your method interface signature must copy the arguments passed by the workflow.
+Your method interface signature must copy the arguments passed by the workflow.
For example, if you invoke a function using one argument as follows, then your method signature assumes that the `number` model variable is an integer:
@@ -114,8 +114,8 @@ For example, if you invoke a function using one argument as follows, then your m
.Example of a `java` function implementation
[source,java]
----
-public class MyInterfaceOrClass {
-
+public class MyInterfaceOrClass {
+
public void myMethod(int number) {
if (number % 2 != 0) {
throw new IllegalArgumentException("Odd situation");
@@ -126,7 +126,7 @@ public class MyInterfaceOrClass {
As a particular case, if you provide no argument in the workflow definition, the signature of the Java method might include a link:https://github.com/FasterXML/jackson[Jackson's] `JsonNode` parameter. This means that the Java method expects the entire workflow model as input.
-When using the following example function reference with no arguments, and if the method signature contains a `JsonNode` parameter, the entire workflow model is passed when the method call is performed.
+When using the following example function reference with no arguments, and if the method signature contains a `JsonNode` parameter, the entire workflow model is passed when the method call is performed.
.Example of a `java` function reference with no arguments
[source,json]
@@ -160,7 +160,7 @@ If your method returns a `JsonNode`, the content of that node is merged into the
The same occurs if your method returns any Java `Object` descendant that is not a primitive wrapper, the Java object is recursively converted to a JSON object and the result is merged into the workflow model (you can use an action data filter to control what is merged).
-If your method returns a primitive type or their corresponding wrapper object (int, boolean, long, and so on), then the primitive value is added to the workflow model with the name `response` (you can change that name using an action data filter).
+If your method returns a primitive type or their corresponding wrapper object (int, boolean, long, and so on), then the primitive value is added to the workflow model with the name `response` (you can change that name using an action data filter).
If your method returns Java collections, it is converted to a JSON array and added to the workflow model with the name `response` (you can change that name using an action data filter).
@@ -174,16 +174,14 @@ Therefore, if you need to do so, you can update the signature of methods from pr
[source,java]
----
public class MyInterfaceOrClass {
-
-
- public JsonNode myMethod(JsonNode workflowData, KogitoProcessContext context ) {
+public JsonNode myMethod(JsonNode workflowData, KogitoProcessContext context ) {
// do whatever I want with the JsonNode and the Kogito process context
......
// return the modified content:
return workflowData;
}
}
-----
+----
.Example of a function accessing Kogito context
[source,java]
@@ -202,9 +200,147 @@ public class MyInterfaceOrClass {
[WARNING]
====
-Avoid using `java` functions to call the external services, instead you can use the xref:serverless-logic:service-orchestration/orchestration-of-openapi-based-services.adoc[services orchestration features].
+Avoid using `java` functions to call external services; instead, you can use the xref:serverless-logic:service-orchestration/orchestration-of-openapi-based-services.adoc[services orchestration features].
====
+== Custom function types
+
+You can add your custom types by using the Kogito add-on mechanism. As with predefined custom types such as xref:serverless-logic:core/custom-functions-support.adoc#con-func-sysout[`sysout`] or xref:serverless-logic:core/custom-functions-support.adoc#con-func-java[`java`], the custom type identifier is the prefix of the operation field of the function definition.
+
+Kogito add-ons rely on the link:{quarkus_guides_base_url}/writing-extensions[Quarkus extensions] mechanism, and an add-on consists of at least two Maven projects:
+
+- The deployment module, which is responsible for generating the code required for the extension to work.
+- The runtime module, which includes the non-generated classes that are required for the extension to work.
+
+In the case of a Serverless Workflow custom type, the following are the roles of the modules:
+
+- *The deployment project*
++
+The deployment project is expected to configure the work item handler used during runtime to perform the logic associated with the custom type.
+It must contain a Java class that inherits from `WorkItemTypeHandler`. Its responsibilities are to indicate the custom type identifier (the operation prefix, as indicated earlier) and to set up the `WorkItemNodeFactory` instance passed as a parameter of the `fillWorkItemHandler` method. That instance is included in the Kogito process definition for that Workflow. As a part of this setup, you must indicate the name of the `WorkItemNodeFactory`. You might also provide any relevant metadata for that handler if needed.
+
+- *The runtime project*
++
+The runtime project consists of a `WorkflowWorkItemHandler` implementation, whose name must match the one provided to `WorkItemNodeFactory` during the deployment phase, and a `WorkItemHandlerConfig` bean that registers that handler with that name.
++
+When a Serverless Workflow function is called, Kogito identifies the proper `WorkflowWorkItemHandler` instance to be used for that function type (using the handler name associated with that type by the deployment project) and then invokes the `internalExecute` method. The `Map` parameter contains the function arguments defined in the workflow, and the `WorkItem` parameter contains the metadata information added to the handler by the deployment project. Hence, the `internalExecute` implementation has access to all the information needed to perform the computational logic intended for that custom type.
+
+=== Custom function type example
+
+Assume you want to interact, from a workflow file, with a legacy RPC server such as the one defined in link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc-server[this project]. This legacy server supports four simple arithmetic operations: add, minus, multiply and divide, which can be invoked using a custom RPC protocol.
+
+Since this is an uncommon protocol, the workflow cannot handle these operations by using any of the predefined Serverless Workflow function types. The available options are to use a Java service, which invokes a Java class that knows how to interact with the server, or to define a custom type that knows how to interact with the service.
+
+Using the latter approach, you can write a link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-type-example/src/main/resources/customType.sw.json[workflow file] defining this function.
+
+.RPC Custom function definition example
+
+[source,json]
+----
+ "functions": [
+ {
+ "name": "division",
+ "type": "custom",
+ "operation": "rpc:division"
+ }
+ ],
+----
+
+The `operation` starts with `rpc`, which is the custom type identifier, and continues with `division`, which denotes the operation that will be executed in the legacy server.
+
+A Kogito add-on that defines the `rpc` custom type must be developed for this function definition to be identified. It consists of a link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc-deployment[deployment project] and a link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc[runtime project].
+
+The deployment project is responsible for extending the link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc-deployment/src/main/java/org/kie/kogito/examples/sw/services/RPCCustomTypeHandler.java[`WorkItemTypeHandler`] and setting up the `WorkItemNodeFactory` as follows:
+
+.Example of the RPC function Java implementation
+
+[source,java]
+----
+
+import static org.kie.kogito.examples.sw.custom.RPCCustomWorkItemHandler.NAME;
+import static org.kie.kogito.examples.sw.custom.RPCCustomWorkItemHandler.OPERATION;
+
+public class RPCCustomTypeHandler extends WorkItemTypeHandler {
+
+
+ @Override
+ public String type() {
+ return "rpc";
+ }
+
+ @Override
+    protected <T extends RuleFlowNodeContainerFactory<T, ?>> WorkItemNodeFactory<T> fillWorkItemHandler(Workflow workflow,
+            ParserContext context,
+            WorkItemNodeFactory<T> node,
+            FunctionDefinition functionDef) {
+ return node.workName(NAME).metaData(OPERATION, trimCustomOperation(functionDef));
+ }
+}
+
+----
+
+This example sets up the name of the `KogitoWorkItemHandler`, adds a metadata key with the name of the remote operation (extracted from the Serverless Workflow function definition operation property), and declares that the custom type is named `rpc`.
+
+The Runtime project contains the link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc/src/main/java/org/kie/kogito/examples/sw/custom/RPCCustomWorkItemHandler.java[KogitoWorkItemHandler] and the link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc/src/main/java/org/kie/kogito/examples/sw/custom/RPCCustomWorkItemHandlerConfig.java[WorkItemHandlerConfig] implementations.
+
+As expected, `RPCCustomWorkItemHandler` implements the `internalExecute` method as follows:
+
+.Example of implementation of the `internalExecute` method
+
+[source, java]
+----
+ @Override
+protected Object internalExecute(KogitoWorkItem workItem, Map<String, Object> parameters) {
+ try {
+        Iterator<Object> iter = parameters.values().iterator();
+ Map metadata = workItem.getNodeInstance().getNode().getMetaData();
+ String operationId = (String) metadata.get(OPERATION);
+ if (operationId == null) {
+ throw new IllegalArgumentException ("Operation is a mandatory parameter");
+ }
+ return CalculatorClient.invokeOperation((String)metadata.getOrDefault(HOST,"localhost"), (int) metadata.getOrDefault(PORT, 8082),
+ OperationId.valueOf(operationId.toUpperCase()), (Integer)iter.next(), (Integer)iter.next());
+ } catch (IOException io ) {
+ throw new UncheckedIOException(io);
+ }
+}
+----
+
+The implementation invokes the link:{kogito_sw_examples_url}/serverless-workflow-custom-type/serverless-workflow-custom-rpc-server/src/main/java/org/kie/kogito/examples/sw/custom/CalculatorClient.java#L45-L67[`CalculatorClient.invokeOperation`], a static Java method that knows how to interact with the legacy service. You can obtain the operation parameter from the `WorkItem` metadata. The dividend and the divisor parameters are obtained from the `Map` parameter, which contains the function arguments defined in the workflow file.
+
+.Example of the custom function call from the workflow definition
+
+[source, json]
+----
+"actions": [
+ {
+ "functionRef": {
+ "refName": "division",
+ "arguments": {
+ "dividend": ".dividend",
+ "divisor" : ".divisor"
+ }
+ }
+
+ }
+----
+
+The `RPCCustomWorkItemHandlerConfig` is a bean that registers the handler under its name.
+
+.Example of injecting the custom `WorkItemHandler`
+
+[source, java]
+----
+@Inject
+RPCCustomWorkItemHandler handler;
+
+@PostConstruct
+void init () {
+ register(handler.getName(),handler);
+}
+----
+
+
== Additional resources
* xref:serverless-logic:getting-started/cncf-serverless-workflow-specification-support.adoc[CNCF Serverless Workflow specification]
diff --git a/modules/serverless-logic/pages/core/timeouts-support.adoc b/modules/serverless-logic/pages/core/timeouts-support.adoc
new file mode 100644
index 00000000..3edee331
--- /dev/null
+++ b/modules/serverless-logic/pages/core/timeouts-support.adoc
@@ -0,0 +1,403 @@
+= Timeouts on events for {context}
+:compat-mode!:
+// Metadata:
+:description: Using timeouts in {context}
+:keywords: kogito, workflow, serverless, timeout, timer, expiration
+
+When you define a state in a serverless workflow, you can use the `timeouts` property to configure the maximum time to complete this state.
+When that time is overdue, the state is considered timed-out, and the engine continues the execution from this state. The execution flow depends on the state type, for instance,
+a transition to a next state.
+All the properties you can use to configure state timeouts are described in the link:{spec_doc_url}#event-timeout-definition[Serverless Workflow specification].
+
+Event-based states can use the sub-property `eventTimeout` to configure the maximum time to wait for an event to arrive.
+
+This property uses link:https://en.wikipedia.org/wiki/ISO_8601[`ISO 8601` data and time standard] to specify a duration of time.
+It follows the format `PnDTnHnMn.nS` with days considered to be exactly 24 hours.
+For instance, `PT15M` configures 15 minutes, and `P2DT3H4M` defines 2 days, 3 hours and 4 minutes.
+
+[NOTE]
+====
+Event timeouts cannot be defined as a specific point in time; instead, they must be an amount of time (a duration), which is considered to start when the referred state becomes active in the workflow.
+====
+
+[IMPORTANT]
+====
+{product_name} currently supports timeouts only for *Callback* and *Switch* states with events. Other states will be included in future releases.
+====
+
+=== Callback state timeout
+Callback state can be used when you need to execute an action, in general to call an external service, and wait for an asynchronous response in the form of an event, the callback.
+
+Once the response event is consumed, the workflow continues the execution, in general moving to the next state defined in the `transition` property. See more on xref:eventing/working-with-callbacks.adoc[Callback state in {context}].
+
+Since the callback state halts the execution until the event is consumed, you can define an `eventTimeout` for it, and in case the event does not arrive in the defined duration time, the workflow continues the execution moving to the next state defined in the transition, see the <<callback-state, callback state example>>.
+
+[#callback-state]
+.Example of callback state with timeout
+[source,json]
+----
+{
+ "name": "CallbackState",
+ "type": "callback",
+ "action": {
+ "name": "callbackAction",
+ "functionRef": {
+ "refName": "callbackFunction",
+ "arguments": {
+ "input": "${\"callback-state-timeouts: \" + $WORKFLOW.instanceId + \" has executed the callbackFunction.\"}"
+ }
+ }
+ },
+ "eventRef": "callbackEvent",
+ "transition": "CheckEventArrival",
+ "onErrors": [
+ {
+ "errorRef": "callbackError",
+ "transition": "FinalizeWithError"
+ }
+ ],
+ "timeouts": {
+ "eventTimeout": "PT30S"
+ }
+}
+----
+
+=== Switch state timeout
+
+
+The switch state can be used when you need to take an action based on conditions, defined with the link:{spec_doc_url}#switch-state-event-conditions[eventConditions] property, where the workflow execution waits to make a decision depending on the events to be consumed and matched, defined through link:{spec_doc_url}#event-definition[event definition].
+
+In this situation, you can define an event timeout, which controls the maximum time to wait for an event to match the conditions. If this time expires, the workflow moves to the state defined in the `defaultCondition` property of the switch state, as you can see in the <<switch-state, switch state example>>.
+
+See more details about this state on the link:{spec_doc_url}#switch-state[Serverless Workflow specification].
+
+[#switch-state]
+.Example of switch state with timeout
+[source,json]
+----
+{
+ "name": "ChooseOnEvent",
+ "type": "switch",
+ "eventConditions": [
+ {
+ "eventRef": "visaApprovedEvent",
+ "transition": "ApprovedVisa"
+ },
+ {
+ "eventRef": "visaDeniedEvent",
+ "transition": "DeniedVisa"
+ }
+ ],
+ "defaultCondition": {
+ "transition": "HandleNoVisaDecision"
+ },
+ "timeouts": {
+ "eventTimeout": "PT5S"
+ }
+}
+----
+
+=== Deploying a timed-based workflow
+
+In order to deploy a workflow that contains timeouts or any other timer-based action, it is necessary to have the Job Service running in your environment, which is an external service responsible for controlling the workflow timers, see the <<job-service, Job Service configuration>> for more information.
+In the <<timeout-example, timeout showcase example>> you can see the details of how to set up a Knative infrastructure with the workflow and the Job Service running.
+
+[#job-service]
+=== Job Service configuration
+
+All timer-related actions that might be declared in a workflow, are handled by a supporting service, called Job Service, which is responsible for managing, scheduling, and firing all actions (jobs) to be executed in the workflows.
+
+Suppose the workflow service is not configured to use the Job Service or there is no such service running. In that case, all timer-related actions use an embedded in-memory implementation of the Job Service, which should not be used in production, since all timers are lost when the application shuts down — a very common behavior in a serverless architecture with the scale-to-zero approach. That said, running without the Job Service can only be used for testing or development, but not for production.
+
+The main goal of the Job Service is to work with only active jobs. The Job Service tracks only the jobs that are scheduled and that need to be executed. When a job reaches a final state, the job is removed from the Job Service.
+
+When configured in your environment, all the jobs information and status changes are sent to the {product_name} `Data
+Index Service`, where they can be indexed and made available by GraphQL queries.
+
+[NOTE]
+====
+Data index service and the support for jobs information will be available in future releases.
+====
+
+==== Job Service persistence
+
+An important configuration aspect of job service is the persistence mechanism, where all job information is stored in a database that makes this information durable upon service restarts and guarantees no information is lost.
+
+==== PostgreSQL
+
+PostgreSQL is the recommended database to use with job service.
+Additionally, it provides an initialization procedure that integrates link:https://flywaydb.org[Flyway] for the database initialization. It automatically controls the database schema, in this way all tables are created by the service.
+
+In case you need to externally control the database schema, you can check the Flyway SQL scripts in link:{kogito_apps_url}/jobs-service/jobs-service-postgresql/src/main/resources/db/migration[migration] and apply them.
+
+You need to set the proper configuration parameters when starting job service.
+The example shows how to run PostgreSQL as a Kubernetes deployment, but you can run it the way that fits your environment; the important part is to set all the configuration parameters to point to your running instance of PostgreSQL.
+
+==== Ephemeral
+Alternatively, there is in-memory database support that does not require any external database configuration. It can be used for testing and development purposes, but it is not recommended for production, since all jobs are lost upon a service restart or failure.
+
+=== Job Service communication
+
+[NOTE]
+====
+The Job Service does not execute a job but triggers a callback that might be an HTTP request or a Cloud Event that is
+managed by the configured <<job-addon-configuration, addon>> in the workflow application.
+====
+
+==== Knative Eventing
+
+To configure the communication between the Job Service and the workflow runtime through the knative eventing system, you must provide a set of configurations.
+
+The Job Service configuration is done through the deployment descriptor shown in the <<job-service-deploy, Deploying Job Service>> section.
+
+[[job-addon-configuration]]
+==== Addon configuration in the workflow runtime
+
+The communication from the workflow application with Job Service is done through an addon, which is responsible for publishing and consuming events related to timers.
+When you run the workflow as a knative service, you must add the `kogito-addons-quarkus-jobs-knative-eventing` to your project and provide the proper configuration.
+
+* Dependency in the `pom.xml`:
+
+.Job Service Knative eventing addon dependency
+[source, xml]
+----
+<dependency>
+  <groupId>org.kie.kogito</groupId>
+  <artifactId>kogito-addons-quarkus-jobs-knative-eventing</artifactId>
+</dependency>
+----
+
+* Configuration parameters:
+
+[[workflow-application-configuration-parameters]]
+.Job Service Knative eventing addon configuration parameters
+[source, properties]
+----
+# Events produced by kogito-addons-quarkus-jobs-knative-eventing to program the timers on the Job Service.
+mp.messaging.outgoing.kogito-job-service-job-request-events.connector=quarkus-http
+mp.messaging.outgoing.kogito-job-service-job-request-events.url=${K_SINK:http://localhost:8280/jobs/events}
+mp.messaging.outgoing.kogito-job-service-job-request-events.method=POST
+----
+
+[NOTE]
+====
+The `K_SINK` variable used in the URL configuration for the outgoing channel in the
+<<workflow-application-configuration-parameters, configuration parameters>>, is injected by Knative Eventing, more information on
+xref:eventing/consume-produce-events-with-knative-eventing.adoc[Consuming and producing events on Knative Eventing].
+====
+
+[#timeout-example]
+== Timeout showcase example
+
+In the link:{kogito_sw_examples_url}/serverless-workflow-timeouts-showcase[serverless-workflow-timeouts-showcase] you can see an end-to-end example that contains a serverless workflow application with timeouts configured alongside Job Service running on Knative.
+
+There are two workflows that showcase the timeouts usage in the `Callback` and `Switch` states.
+
+=== Callback workflow
+
+It is a simple workflow, where once the execution reaches the callback state it waits for the event `callbackEvent` to arrive and continue the execution.
+
+.Callback timeout workflow
+image::core/callback-state-timeouts.svg[]
+
+.Callback event
+[source, json]
+----
+{
+"name": "callbackEvent",
+"source": "",
+"type": "callback_event_type"
+}
+----
+
+The timeout is configured with a maximum time of 30 seconds for the workflow to wait for the `callbackEvent` to arrive. In case it does not arrive in time, the execution moves on, and the `eventData` variable remains null.
+See the <<callback-state, callback state example>>.
+
+
+=== Switch workflow
+
+The switch example is similar to the callback one, but once the execution reaches the state, it waits for one of the two configured events, `visaDeniedEvent` or `visaApprovedEvent`, to arrive, see the <<switch-state, switch state example>>.
+
+If any of the configured events arrives before the timeout is overdue, the workflow execution moves to the next state defined in the corresponding `transition`.
+
+If none of the events arrive before the timeout is overdue, the workflow then moves to the state defined in `defaultCondition` transition.
+
+.Switch timeout workflow
+image::core/switch-state-timeouts.svg[]
+
+=== Running the example
+
+To run the example you must have access to a Kubernetes cluster running with Knative configured.
+
+For simplicity, the example uses minikube, you can follow the steps described in the example's link:{kogito_sw_examples_url}/serverless-workflow-timeouts-showcase[readme].
+
+[NOTE]
+====
+All the descriptor files used to deploy the example infrastructure are present in the example.
+====
+
+The database and Job Service deployment files are located under `/kubernetes` folder.
+
+The descriptors related to the workflow application are generated after the build under `/target/kubernetes`.
+
+The following diagram shows the example's architecture when it is deployed in the Kubernetes + Knative infrastructure.
+
+.Knative Workflow with Job Service architecture
+image::core/jobs-service-knative-architecture.png[]
+
+==== Deploying the database
+
+The workflow application and the Job Service use PostgreSQL as the persistence backend to store information about the workflow instances and jobs, respectively.
+In the example you can deploy a single database instance to be used by both; in a production environment it is recommended to have independent database instances.
+
+To run PostgreSQL you need to apply the following on the cluster:
+
+.Deploying the database
+[source, shell]
+----
+kubectl apply -f kubernetes/timeouts-showcase-database.yml
+----
+
+.After executing the command, you will see an output like this:
+[source, shell]
+----
+secret/timeouts-showcase-database created
+deployment.apps/timeouts-showcase-database created
+service/timeouts-showcase-database created
+----
+
+[#job-service-deploy]
+==== Deploying Job Service
+.Deploying Job Service
+[source, shell]
+----
+kubectl apply -f kubernetes/jobs-service-postgresql.yml
+----
+
+.After executing the command, you will see an output like this:
+[source, shell]
+----
+service/jobs-service-postgresql created
+deployment.apps/jobs-service-postgresql created
+trigger.eventing.knative.dev/jobs-service-postgresql-create-job-trigger created
+trigger.eventing.knative.dev/jobs-service-postgresql-cancel-job-trigger created
+sinkbinding.sources.knative.dev/jobs-service-postgresql-sb created
+----
+
+==== Deploying the timeout showcase workflow
+
+You need to build the workflow with the `knative` Maven profile, then the descriptor files are generated under the `target/kubernetes` folder, and the image is pushed to the container registry.
+
+.Building the timeout workflow showcase for knative
+[source, shell]
+----
+mvn clean install -Pknative
+----
+
+.Deploying the timeout workflow showcase in knative
+[source, shell]
+----
+kubectl apply -f target/kubernetes/knative.yml
+kubectl apply -f target/kubernetes/kogito.yml
+----
+
+.After executing the commands you will see an output like this:
+[source, shell]
+----
+service.serving.knative.dev/timeouts-showcase created
+
+trigger.eventing.knative.dev/visa-denied-event-type-trigger-timeouts-showcase created
+trigger.eventing.knative.dev/visa-approved-event-type-trigger-timeouts-showcase created
+trigger.eventing.knative.dev/callback-event-type-trigger-timeouts-showcase created
+sinkbinding.sources.knative.dev/sb-timeouts-showcase created
+----
+
+==== Creating a workflow instance
+
+To create a workflow instance you can interact with the workflow using the provided REST APIs. In addition, the example provides a test Web UI to make testing easier.
+
+First, you need to get the service URL on the cluster.
+
+.Getting the workflow service URL on the cluster
+[source, shell]
+----
+kn service list | grep timeouts-showcase
+----
+
+.Service URL in the response, similar to this.
+[source, shell]
+----
+NAME URL LATEST AGE CONDITIONS READY REASON
+timeouts-showcase http://timeouts-showcase.default.10.105.86.217.sslip.io timeouts-showcase-00001 3m50s 3 OK / 3 True
+----
+
+=== Using the showcase UI
+
+The example Web UI is handy to interact with the workflow, you just need to open in the browser the URL you got from the previous step.
+
+.Timeout workflow showcase UI
+image::core/timeout-switch-wokflow-ui.png[]
+
+You can create new workflow instances and interact with them to complete, or simply wait for the timeout to be triggered to check it's working.
+More details on the link:{kogito_sw_examples_url}/serverless-workflow-timeouts-showcase#timeouts-showcase-ui[readme].
+
+=== Using REST APIs
+
+You can test the workflows using the REST APIs; in fact, they are the same ones used by the Web UI in both workflows.
+
+* Callback
+
+.Creating a callback workflow with timeout
+[source, shell]
+----
+curl -X 'POST' \
+'http://timeouts-showcase.default.10.105.86.217.sslip.io/callback_state_timeouts' \
+-H 'accept: */*' \
+-H 'Content-Type: application/json' \
+-d '{}'
+----
+
+* Switch
+
+.Creating a Switch workflow with timeout
+[source, shell]
+----
+curl -X 'POST' \
+'http://timeouts-showcase.default.10.105.86.217.sslip.io/switch_state_timeouts' \
+-H 'accept: */*' \
+-H 'Content-Type: application/json' \
+-d '{}'
+----
+
+* Checking whether the workflow instance was created
+
+.Getting the created workflow instance
+[source, shell]
+----
+curl -X 'GET' 'http://timeouts-showcase.default.10.105.86.217.sslip.io/switch_state_timeouts'
+----
+
+The command will produce an output like this, which indicates that the process is waiting for an event to arrive.
+
+.Response with the created instance
+[source, shell]
+----
+[{"id":"2e8e1930-9bae-4d60-b364-6fbd61128f51","workflowdata":{}}]
+----
+
+* Checking the timeout was executed after 30 seconds:
+
+.Getting the created workflow instance after 30 seconds
+[source, shell]
+----
+curl -X 'GET' 'http://timeouts-showcase.default.10.105.86.217.sslip.io/switch_state_timeouts'
+[]
+----
+
+As you can see there are no active workflow instances, indicating the timeout was executed and the created instance was completed.
+
+== Additional resources
+
+* xref:eventing/working-with-callbacks.adoc[Callback state in {context}]
+
+include::../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
diff --git a/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc b/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc
index d3fcb730..1ecb5379 100644
--- a/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc
+++ b/modules/serverless-logic/pages/core/understanding-jq-expressions.adoc
@@ -8,9 +8,9 @@
:jsonpath_url: https://github.com/json-path/JsonPath/
:json_data_types_url: https://www.w3schools.com/js/js_json_datatypes.asp
-Each workflow instance is associated with a data model. A data model consists of a JSON object regardless of whether the workflow file contains YAML or JSON. The initial content of the JSON object depends on how the workflow is started. If the workflow is created using link:{cloud_events_url}[Cloud Event], then the workflow content is taken from the `data` property. However, if the workflow is started through an HTTP POST invocation, then the workflow content is taken from the `workflowdata` property.
+Each workflow instance is associated with a data model. A data model consists of a JSON object regardless of whether the workflow file contains YAML or JSON. The initial content of the JSON object depends on how the workflow is started. If the workflow is created using the link:{cloud_events_url}[Cloud Event], then the workflow content is taken from the `data` property. However, if the workflow is started through an HTTP POST request, then the workflow content is taken from the request body.
-The workflow expressions in the link:{spec_doc_url}#workflow-expressions[Serverless Workflow specification] are used to interact with the data model. The supported expression languages include link:{jsonpath_url}[JsonPath] and link:{jq_url}[jq]. jq expression language is the default language. However, you can change the expression language to JsonPath using the `expressionLang` property.
+The workflow expressions in the link:{spec_doc_url}#workflow-expressions[Serverless Workflow specification] are used to interact with the data model. The supported expression languages include link:{jsonpath_url}[JsonPath] and link:{jq_url}[jq]. jq expression language is the default language. However, you can change the expression language to JsonPath using the `expressionLang` property.
This document describes the usage of jq expressions in switch state conditions, action function arguments, and data filtering.
@@ -21,7 +21,7 @@ The conditions occurring in a switch state enable the workflow designer to selec
A condition in a switch state is an expression, which returns a boolean value when evaluated against the data model. If a condition associated with a state transition returns true, then the workflow must follow that transition.
-For example, in the link:{kogito_sw_examples_url}/serverless-workflow-greeting-quarkus[`serverless-workflow-greeting-quarkus`] example application, a message is displayed depending on the selected language, that is English or Spanish.
+For example, in the link:{kogito_sw_examples_url}/serverless-workflow-greeting-quarkus[`serverless-workflow-greeting-quarkus`] example application, a message is displayed depending on the selected language, that is English or Spanish.
If the value of the `language` property is English, the constant literal injected on the `message` property is _Hello from_, otherwise the constant value injected on the `message` property is _Saludos desdeā¦_.
@@ -68,15 +68,15 @@ Following is the `subtraction` function in `serverless-workflow-temperature-conv
}]
----
-The arguments in `subtraction` function are expressed as a JSON object, and the property values of the JSON object are either a string containing an expression or a link:{json_data_types_url}[JSON data type], such as string, number, or boolean.
+The arguments in `subtraction` function are expressed as a JSON object, and the property values of the JSON object are either a string containing an expression or a link:{json_data_types_url}[JSON data type], such as string, number, or boolean.
.Example arguments in `subtraction` function
[source,json]
----
-"functionRef":
+"functionRef":
{
"refName": "subtraction",
- "arguments":
+ "arguments":
{
"leftElement": ".fahrenheit",
"rightElement": ".subtractValue"
@@ -84,7 +84,7 @@ The arguments in `subtraction` function are expressed as a JSON object, and the
}
----
-In the previous example, the left number is equal to the `fahrenheit` property (an input number that invokes the workflow), and the right number is equal to the `subtractValue` property (a constant number that is injected to the workflow model by `SetConstants` state). Once the expression evaluation is resolved for all properties that contain an expression, the resulting object is passed in the OpenAPI request. Based on the OpenAPI definition, the properties in the JSON object are used as body, path, query, or header of the upcoming REST invocation.
+In the previous example, the left number is equal to the `fahrenheit` property (an input number that invokes the workflow), and the right number is equal to the `subtractValue` property (a constant number that is injected to the workflow model by `SetConstants` state). Once the expression evaluation is resolved for all properties that contain an expression, the resulting object is passed in the OpenAPI request. Based on the OpenAPI definition, the properties in the JSON object are used as body, path, query, or header of the upcoming REST invocation.
Following is an example of function arguments defined as string that contains an expression, returning a JSON object:
@@ -105,7 +105,7 @@ In the previous example, the result of the expression evaluation is the same JSO
The Serverless Workflow specification defines the following filtering mechanisms to select which information must be part of the workflow data model:
-* link:{spec_doc_url}#action-data-filters[Action data filters]: Select the part of the action result that is merged into the data model, which overrides the properties that share the name with the selected action result.
+* link:{spec_doc_url}#action-data-filters[Action data filters]: Select the part of the action result that is merged into the data model, which overrides the properties that share the name with the selected action result.
* link:{spec_doc_url}#event-data-filters[Event data filters]: Similar to the action data filters, but apply to the events instead of actions.
* link:{spec_doc_url}#state-data-filters[State data filters]: Define the workflow model to the JSON object, which is returned by the expression and discards an existing property.
@@ -130,9 +130,9 @@ Following is an expression function in `serverless-workflow-expression-quarkus`
]
----
-In the previous example, an array of complex numbers (`x` is real coordinate and `y` is imaginary coordinate) is accepted and an expression function is defined to calculate the maximum value of `x` and minimum value of `y` for the `numbers` array.
+In the previous example, an array of complex numbers (`x` is real coordinate and `y` is imaginary coordinate) is accepted and an expression function is defined to calculate the maximum value of `x` and minimum value of `y` for the `numbers` array.
-Also, the `serverless-workflow-expression-quarkus` example application contains an action data filter defined inside `squareState` action and a state data filter defined inside `finish` state. The action data filter selects the maximum value of `x` to be merged to the workflow model, and the state data filter defines the maximum value as the entire workflow model that is returned as the workflow response.
+Also, the `serverless-workflow-expression-quarkus` example application contains an action data filter defined inside `squareState` action and a state data filter defined inside `finish` state. The action data filter selects the maximum value of `x` to be merged to the workflow model, and the state data filter defines the maximum value as the entire workflow model that is returned as the workflow response.
The previous example expression also contains a `max` function of type expression and an `operation` property containing a string of jq expression. This jq expression returns a JSON object, in which the `max` property is the maximum value of the `x` coordinate and the `min` property is the minimum value of the `y` coordinate.
@@ -172,7 +172,7 @@ Therefore, after executing the action, the workflow model consists of a `number`
}
----
-The original `numbers` array should not be returned as a result of the workflow execution, therefore the final stage consists of a state data filter defining the content of the output model. The output model should contain a `result` property and the value of `result` property should be the maximum number that is stored by the previous state in the `number` property.
+The original `numbers` array should not be returned as a result of the workflow execution, therefore the final stage consists of a state data filter defining the content of the output model. The output model should contain a `result` property and the value of `result` property should be the maximum number that is stored by the previous state in the `number` property.
In the previous example, the workflow model is changed by the `input` property of the filter, which means that the output model is updated before the state is executed. As a final result, the output model consists of a `result` property, containing the maximum value of `x`.
--
@@ -181,7 +181,7 @@ Event data filter example::
+
--
-You can find an example of event data filtering in the link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus[`serverless-workflow-callback-quarkus`] example application.
+You can find an example of event data filtering in the link:{kogito_sw_examples_url}/serverless-workflow-callback-quarkus[`serverless-workflow-callback-quarkus`] example application.
.Example event filter
[source,json]
diff --git a/modules/serverless-logic/pages/core/working-with-parallelism.adoc b/modules/serverless-logic/pages/core/working-with-parallelism.adoc
index 6f127584..e23642ce 100644
--- a/modules/serverless-logic/pages/core/working-with-parallelism.adoc
+++ b/modules/serverless-logic/pages/core/working-with-parallelism.adoc
@@ -18,7 +18,7 @@ The `serverless-workflow-service-calls-quarkus` example application is a workflo
[[proc-parallel-creating-the-workflow]]
== Creating a parallel workflow
-You can create a workflow, which performs a series of parallel tasks.
+You can create a workflow, which performs a series of parallel tasks.
.Prerequisites
@@ -114,7 +114,7 @@ After you create a workflow that performs a series of parallel tasks, you can ru
.Prerequisites
-* A parallel workflow is created.
+* A parallel workflow is created.
+
For more information, see <>.
@@ -139,9 +139,7 @@ curl -X 'POST' \
'http://localhost:8080/parallel' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
- -d '{
- "workflowdata": {}
-}'
+ -d '{}'
----
.Example response
@@ -165,7 +163,7 @@ You can define the `"completionType": "atLeast"` to run only some branches in pa
.Prerequisites
-* A parallel workflow is created.
+* A parallel workflow is created.
+
For more information, see <>.
@@ -253,9 +251,7 @@ curl -X 'POST' \
'http://localhost:8080/parallel' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
- -d '{
- "workflowdata": {}
-}'
+ -d '{}'
----
.Example response
@@ -276,4 +272,4 @@ The parallel workflow data shows the concatenated string as result, but in this
* xref:serverless-logic:getting-started/create-your-first-workflow-service.adoc[Creating your first workflow service]
-include::../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
+include::../../pages/_common-content/report-issue.adoc[]
diff --git a/modules/serverless-logic/pages/getting-started/cncf-serverless-workflow-specification-support.adoc b/modules/serverless-logic/pages/getting-started/cncf-serverless-workflow-specification-support.adoc
index ee29579b..61d24775 100644
--- a/modules/serverless-logic/pages/getting-started/cncf-serverless-workflow-specification-support.adoc
+++ b/modules/serverless-logic/pages/getting-started/cncf-serverless-workflow-specification-support.adoc
@@ -12,7 +12,9 @@ The following table shows the implementation status for each Serverless Workflow
[NOTE]
====
-{product_name} does not support link:{spec_doc_url}#Retry-Definition[Retries], link:{spec_doc_url}#sleep-state[Sleep State], and link:{spec_doc_url}#workflow-timeouts[Timeouts] features of Serverless Workflow specification.
+{product_name} does not support link:{spec_doc_url}#Retry-Definition[Retries], link:{spec_doc_url}#sleep-state[Sleep
+State], and has a limited support of link:{spec_doc_url}#workflow-timeouts[Timeouts] features of Serverless Workflow
+specification.
====
.Implementation status icons
@@ -65,7 +67,7 @@ The following table shows the implementation status for each Serverless Workflow
| link:{spec_doc_url}#Retry-Definition[Retry Definition]
| <>
-| emoji:construction[]
+| emoji:last_quarter_moon[]
| link:{spec_doc_url}#workflow-timeouts[Workflow Timeouts]
| <>
@@ -229,7 +231,10 @@ Alternatively, you can use xref:serverless-logic:core/understanding-workflow-err
[[timeouts]]
== Timeouts
-{product_name} does not support Timeouts feature, however, it will be implemented in a future release.
+{product_name} has limited support for the Timeouts feature, which covers only *Callback* and *Switch* states with events.
+Other states will be included in future releases.
+
+For more information about timeouts, see xref:serverless-logic:core/timeouts-support.adoc[Timeouts on events for {context}].
[[compensation]]
== Compensation
diff --git a/modules/serverless-logic/pages/getting-started/create-your-first-workflow-service.adoc b/modules/serverless-logic/pages/getting-started/create-your-first-workflow-service.adoc
index c04d4238..45f22ac8 100644
--- a/modules/serverless-logic/pages/getting-started/create-your-first-workflow-service.adoc
+++ b/modules/serverless-logic/pages/getting-started/create-your-first-workflow-service.adoc
@@ -52,7 +52,7 @@ quarkus create app \
-x=quarkus-resteasy-jackson \
-x=quarkus-smallrye-openapi \
--no-code \
- org.kie.kogito.examples:serverless-workflow-hello-world:1.0
+ org.acme:serverless-workflow-hello-world:1.0.0-SNAPSHOT
----
The previous command creates a Maven Quarkus project in the `serverless-workflow-hello-world` directory containing the required dependencies, including:
@@ -72,14 +72,14 @@ Apache Maven::
[source,shell,subs="attributes"]
----
mvn {quarkus_platform}:quarkus-maven-plugin:{quarkus_version}:create \
- -DprojectGroupId=org.kie.kogito.examples \
+ -DprojectGroupId=org.acme \
-DprojectArtifactId=serverless-workflow-hello-world \
-Dextensions="{kogito_sw_ga},quarkus-container-image-jib,quarkus-resteasy-jackson,quarkus-smallrye-openapi" \
-DnoCode
cd serverless-workflow-hello-world
----
-In the previous command, `org.kie.kogito.examples`, `serverless-workflow-hello-world`, and `1.0` is group ID, artifact ID, and version of your project respectively. `-DnoCode` prevents the generation of workflow example code.
+In the previous command, `org.acme`, `serverless-workflow-hello-world`, and `1.0.0-SNAPSHOT` is group ID, artifact ID, and version of your project respectively. `-DnoCode` prevents the generation of workflow example code.
--
Knative workflow CLI::
+
@@ -270,8 +270,8 @@ Also, to deploy and run your workflow application, see xref:serverless-logic:clo
.Example response
[source,shell,subs="attributes"]
----
-[INFO] ------< org.kie.kogito.examples:serverless-workflow-hello-world >-------
-[INFO] Building serverless-workflow-hello-world 1.0
+[INFO] ------< org.acme:serverless-workflow-hello-world >-------
+[INFO] Building serverless-workflow-hello-world 1.0.0-SNAPSHOT
[INFO] --------------------------------[ jar ]---------------------------------
[INFO]
[INFO] --- quarkus-maven-plugin:{quarkus_version}:dev (default-cli) @ serverless-workflow-hello-world ---
@@ -284,7 +284,7 @@ __ ____ __ _____ ___ __ ____ ______
-/ /_/ / /_/ / __ |/ , _/ ,< / /_/ /\ \
--\___\_\____/_/ |_/_/|_/_/|_|\____/___/
2022-05-25 14:38:09,741 INFO [org.kie.kog.add.qua.mes.com.QuarkusKogitoExtensionInitializer] (Quarkus Main Thread) Registered Kogito CloudEvent extension
-2022-05-25 14:38:09,840 INFO [io.quarkus] (Quarkus Main Thread) serverless-workflow-hello-world 1.0 on JVM (powered by Quarkus {quarkus_version}) started in 6.470s. Listening on: http://localhost:8080
+2022-05-25 14:38:09,840 INFO [io.quarkus] (Quarkus Main Thread) serverless-workflow-hello-world 1.0.0-SNAPSHOT on JVM (powered by Quarkus {quarkus_version}) started in 6.470s. Listening on: http://localhost:8080
2022-05-25 14:38:09,843 INFO [io.quarkus] (Quarkus Main Thread) Profile dev activated. Live Coding activated.
2022-05-25 14:38:09,843 INFO [io.quarkus] (Quarkus Main Thread) Installed features: [cache, cdi, jackson-jq, kogito-addon-messaging-extension, kogito-processes, kogito-serverless-workflow, reactive-routes, rest-client, rest-client-jackson, resteasy, resteasy-jackson, smallrye-context-propagation, smallrye-openapi, smallrye-reactive-messaging, smallrye-reactive-messaging-http, swagger-ui, vertx]
2022-05-25 14:38:12,877 INFO [org.kie.kog.qua.pro.dev.DataIndexInMemoryContainer] (docker-java-stream--938264210) STDOUT: __ ____ __ _____ ___ __ ____ ______
diff --git a/modules/serverless-logic/pages/security/authention-support-for-openapi-services.adoc b/modules/serverless-logic/pages/security/authention-support-for-openapi-services.adoc
index d6f87660..710d3a13 100644
--- a/modules/serverless-logic/pages/security/authention-support-for-openapi-services.adoc
+++ b/modules/serverless-logic/pages/security/authention-support-for-openapi-services.adoc
@@ -45,7 +45,7 @@ The following shows the example of security scheme definitions:
}
----
-If the OpenAPI specification file contains `securitySchemes` definitions, but not link:{open_api_spec_url}[Security Requirement Object] definitions, the generator can be configured to create the security requirement objects by default. In this case, for all operations without a security requirement, the default one is created. Note that the property value needs to match the name of a security scheme object definition, such as `http-basic-example` or `api-key-example` in the previous `securitySchemes` list.
+If the OpenAPI specification file contains `securitySchemes` definitions, but not the link:{open_api_spec_url}#security-requirement-object[Security Requirement Object] definitions, the generator is configured to create the security requirement objects by default. In this case, for all the operations without a security requirement, the default one is created. Note that the property value must match the name of a security scheme object definition, such as `http-basic-example` or `api-key-example` in the previous `securitySchemes` list.
[cols="20%,40%,40%", options="header"]
|===
@@ -328,7 +328,7 @@ The following example shows `security-example.json` file, defining a `sayHelloOa
}
----
-Unlike the `http basic`, `http bearer`, and `apiKey` security schemes, the OAuth 2.0 authentication relies on the link:https://quarkus.io/guides/security-openid-connect-client[Quarkus OpenId Connect (OIDC) and OAuth 2.0 Clients and Filters]. Therefore, you must add link:https://quarkus.io/guides/security-openid-connect-client#oidc-client-filter[Quarkus OIDC Client Filter Extension] to your project as shown in the following example:
+Unlike the `http basic`, `http bearer`, and `apiKey` security schemes, the OAuth 2.0 authentication relies on the link:https://quarkus.io/guides/security-openid-connect-client[Quarkus OpenId Connect (OIDC) and OAuth 2.0 Clients and Filters]. Therefore, you must add the link:https://quarkus.io/guides/security-openid-connect-client-reference#oidc-client-filter[Quarkus OIDC Client Filter Extension] to your project as shown in the following example:
.Example of adding Quarkus OIDC client filter extension
[source, xml]
diff --git a/modules/serverless-logic/pages/security/orchestrating-third-party-services-with-oauth2.adoc b/modules/serverless-logic/pages/security/orchestrating-third-party-services-with-oauth2.adoc
index 4b0f6ebf..0b32b78d 100644
--- a/modules/serverless-logic/pages/security/orchestrating-third-party-services-with-oauth2.adoc
+++ b/modules/serverless-logic/pages/security/orchestrating-third-party-services-with-oauth2.adoc
@@ -456,8 +456,11 @@ Also, you can use the alternatives defined in the link:{quarkus_guide_config_ref
Once you clone the `serverless-workflow-oauth2-orchestration-quarkus` example application from GitHub repository, you can run the example application.
.Prerequisites
-* You have an account in GitHub.
-* Apache Maven {maven_min_version} or later is installed.
+* link:{java_install_url}[Java] {java_min_version} is installed.
+* link:{maven_install_url}[Maven] {maven_min_version} or later is installed.
+* link:{docker_install_url}[Docker] {docker_min_version} or later is installed.
+* (Optional) link:{docker_compose_install_url}[Docker Compose] {docker_compose_min_version} or later is installed.
+
.Procedure
. In a command terminal, clone the `kogito-examples` repository and navigate to the cloned directory:
@@ -544,12 +547,10 @@ curl -X 'POST' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
-d '{
- "workflowdata": {
- "currencyFrom": "EUR",
- "currencyTo": "USD",
- "exchangeDate": "2022-06-10",
- "amount": 2.0
- }
+ "currencyFrom": "EUR",
+ "currencyTo": "USD",
+ "exchangeDate": "2022-06-10",
+ "amount": 2.0
}'
----
Example response::
@@ -585,12 +586,10 @@ curl -X 'POST' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
-d '{
- "workflowdata": {
"currencyFrom": "EUR",
"currencyTo": "MXN",
"exchangeDate": "2022-06-10",
"amount": 2.0
- }
}'
----
Example response::
@@ -632,12 +631,10 @@ curl -X 'POST' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
-d '{
- "workflowdata": {
"currencyFrom": "EUR",
"currencyTo": "USD",
"exchangeDate": "2022-06-10",
"amount": 2.0
- }
}'
----
Example response::
@@ -669,4 +666,4 @@ In this example the error indicates that it was not possible to contact the `acm
* xref:serverless-logic:service-orchestration/orchestration-of-openapi-based-services.adoc[Orchestrating the OpenAPI services]
-include::../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
+include::../../pages/_common-content/report-issue.adoc[]
diff --git a/modules/serverless-logic/pages/service-orchestration/configuring-openapi-services-endpoints.adoc b/modules/serverless-logic/pages/service-orchestration/configuring-openapi-services-endpoints.adoc
index a01fc89b..8d46f68f 100644
--- a/modules/serverless-logic/pages/service-orchestration/configuring-openapi-services-endpoints.adoc
+++ b/modules/serverless-logic/pages/service-orchestration/configuring-openapi-services-endpoints.adoc
@@ -10,7 +10,7 @@
This document describes how you can configure OpenAPI service endpoints in {context}.
[[con-config-openapi-services-endpoints]]
-== Overview
+== Overview
{product_name} leverages MicroProfile REST Client to invoke OpenAPI services. Therefore, you can configure the OpenAPI services by following the MicroProfile Config specification. For the list of properties to configure in the MicroProfile REST Client specification, see link:https://download.eclipse.org/microprofile/microprofile-rest-client-2.0/microprofile-rest-client-spec-2.0.html#mpconfig[Support for MicroProfile Config] in MicroProfile REST Client documentation.
@@ -115,7 +115,7 @@ A Kubernetes service endpoint can be used as a service URL if the target service
=== Using URI alias
-As an alternative to `kogito.sw.operationIdStrategy`, you can assign an alias name to an URI by using `workflow-uri-definitions` custom link:{spec_doc_url}#extensions[extension]. Then you can use that alias as configuration key and in function definitions.
+As an alternative to `kogito.sw.operationIdStrategy`, you can assign an alias name to an URI by using `workflow-uri-definitions` custom link:{spec_doc_url}#extensions[extension]. Then you can use that alias as configuration key and in function definitions.
.Example workflow
[source,json]
@@ -126,12 +126,12 @@ As an alternative to `kogito.sw.operationIdStrategy`, you can assign an alias na
"remoteCatalog": "https://my.remote.host/apicatalog/apis/123/document",
}
}
- ],
+ ],
"functions": [
{
"name": "operation1",
"operation": "remoteCatalog#operation1"
- },
+ },
{
"name": "operation2",
"operation": "remoteCatalog#operation2"
@@ -322,7 +322,7 @@ You can access the `real-stock-service` service at `http://localhost:8383/`.
. In a separate command terminal window, run the `fake-stock-service` service:
+
--
-.Run `fake-stock-service` service
+.Run `fake-stock-service` service
[source,shell]
----
cd fake-stock-service
@@ -335,7 +335,7 @@ You can access the`fake-stock-service` service at `http://localhost:8181/`.
[[proc-config-openapi-services-running-sw-application-in-development-mode]]
=== Running workflow application in development mode
-When you define `%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/`, the `fake-stock-service` service is used in the development mode and you get the same result every time you run the workflow. Using this example, you can run the workflow application in development mode.
+When you define `%dev.quarkus.rest-client.stock_svc_yaml.url=http://localhost:8181/`, the `fake-stock-service` service is used in the development mode and you get the same result every time you run the workflow. Using this example, you can run the workflow application in development mode.
.Prerequisites
* Services that the workflow application sends requests to are started.
@@ -364,7 +364,7 @@ curl -X 'POST' \
'http://localhost:8080/stockprofit' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
- -d '{ "workflowdata": {"symbol": "KGTO" } }'
+ -d '{ "symbol": "KGTO" }'
----
.Example response
@@ -418,7 +418,7 @@ curl -X 'POST' \
'http://localhost:8080/stockprofit' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
- -d '{ "workflowdata": {"symbol": "KGTO" } }'
+ -d '{ "symbol": "KGTO" }'
----
.Example response
@@ -465,7 +465,7 @@ curl -X 'POST' \
'http://localhost:8080/stockprofit' \
-H 'accept: */*' \
-H 'Content-Type: application/json' \
- -d '{ "workflowdata": {"symbol": "KGTO" } }'
+ -d '{ "symbol": "KGTO" }'
----
.Example response
@@ -482,4 +482,4 @@ Note that, in the previous example, you overwrote the property defined in the `a
* xref:serverless-logic:service-orchestration/orchestration-of-openapi-based-services.adoc[Orchestrating the OpenAPI services]
* link:{quarkus-profiles-url}[Quarkus configuration guide]
-include::../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
+include::../../pages/_common-content/report-issue.adoc[]
diff --git a/modules/serverless-logic/pages/service-orchestration/orchestration-of-grpc-services.adoc b/modules/serverless-logic/pages/service-orchestration/orchestration-of-grpc-services.adoc
index 2f3b22b7..059a1a7d 100644
--- a/modules/serverless-logic/pages/service-orchestration/orchestration-of-grpc-services.adoc
+++ b/modules/serverless-logic/pages/service-orchestration/orchestration-of-grpc-services.adoc
@@ -149,7 +149,7 @@ In the same GitHub repository as the example application, there is a link:{grpc_
== Default enum values
gRPC link:{grpc_enum_url}[specification] requires enumeration types to have a default value. The default value is not included in the server response payload. Therefore, use an empty value such as `UNKNOWN` as default.
-If, for any reason, your default value is semantically valid and you want the value to be included in the workflow model, you must set `kogito.grpc.enum.includeDefault` property to true. This way enumeration fields are always filled by the workflow if the server response does not include them.
+If, for any reason, your default value is semantically valid and you want the value to be included in the workflow model, you must set `kogito.grpc.enum.includeDefault` property to true. This way enumeration fields are always filled by the workflow if the server response does not include them.
[[running-serverless-workflow-application]]
== Running the workflow application
@@ -161,21 +161,21 @@ If, for any reason, your default value is semantically valid and you want the va
mvn compile exec:java -Dexec.mainClass="org.kie.kogito.examples.sw.greeting.GreeterService"
----
-. Now you need to run the workflow application
+. Once the server is running, you must navigate to the `serverless-workflow-greeting-client-rpc-quarkus` directory in a separate command terminal and run the workflow application by entering the following command:
+
[source,shell]
----
mvn clean quarkus:dev
----
-. Once the workflow application is started, you can invoke the workflow instance using any http client, such as `curl`
+. Once the workflow application is started, you can invoke the workflow instance using any HTTP client, such as `curl`, from a separate command terminal.
=== Simple gRPC
.Example request
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "John", "language": "English"}}' http://localhost:8080/jsongreet
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John", "language": "English"}' http://localhost:8080/jsongreet
----
.Example response
@@ -188,7 +188,7 @@ You can also try greeting in a different language.
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "Javi", "language": "Spanish"}}' http://localhost:8080/jsongreet
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "Javi", "language": "Spanish"}' http://localhost:8080/jsongreet
----
In response, you will see the greeting in Spanish language.
@@ -198,7 +198,7 @@ In response, you will see the greeting in Spanish language.
.Example request
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {"name": "John"}}' http://localhost:8080/jsongreetserverstream
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"name": "John"}' http://localhost:8080/jsongreetserverstream
----
.Example response
@@ -212,12 +212,7 @@ curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d
.Example request
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {
- "helloRequests" : [
- {"name" : "Javierito", "language":"Spanish"},
- {"name" : "John", "language":"English"},
- {"name" : "Jan", "language":"Czech"}
- ]}}' http://localhost:8080/jsongreetclientstream
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"helloRequests" : [{"name" : "Javierito", "language":"Spanish"}, {"name" : "John", "language":"English"}, {"name" : "Jan", "language":"Czech"}]}' http://localhost:8080/jsongreetclientstream
----
.Example response
@@ -241,12 +236,7 @@ curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d
.Example request
[source,shell]
----
-curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"workflowdata" : {
- "helloRequests" : [
- {"name" : "Javierito", "language":"Spanish"},
- {"name" : "John", "language":"English"},
- {"name" : "Jan", "language":"Czech"}
- ]}}' http://localhost:8080/jsongreetbidistream
+curl -X POST -H 'Content-Type:application/json' -H 'Accept:application/json' -d '{"helloRequests" : [{"name" : "Javierito", "language":"Spanish"},{"name" : "John", "language":"English"},{"name" : "Jan", "language":"Czech"}]}' http://localhost:8080/jsongreetbidistream
----
.Example response
diff --git a/modules/serverless-logic/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc b/modules/serverless-logic/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc
index f9e2a38f..dd38cdda 100644
--- a/modules/serverless-logic/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc
+++ b/modules/serverless-logic/pages/testing-and-troubleshooting/basic-integration-tests-with-restassured.adoc
@@ -6,9 +6,9 @@
This document describes how to test your workflow application using REST Assured.
-Rest Assured enables you to test REST APIs using Java libraries and integrates with Apache Maven. For more information about REST Assured, see link:https://rest-assured.io/[REST Assured page].
+REST Assured enables you to test REST APIs using Java libraries and integrates with Apache Maven. For more information about REST Assured, see link:https://rest-assured.io/[REST Assured page].
-The testing procedure in this document is based on the `serverless-workflow-examples/serverless-workflow-testing-with-rest-assured` example application. You can access this example application in link:{kogito_sw_examples_url}[Kogito Examples] GitHub repository.
+The testing procedure in this document is based on the `serverless-workflow-examples/serverless-workflow-testing-with-rest-assured` example application. You can access this example application in link:{kogito_sw_examples_url}/serverless-workflow-testing-with-rest-assured[Kogito Examples] GitHub repository.
The following procedure describes how to test a workflow application that exposes the `hello` endpoint, which expects a `name` as a request parameter and returns `greeting` value:
@@ -16,9 +16,7 @@ The following procedure describes how to test a workflow application that expose
[source,json]
----
{
- "workflowdata": {
- "name": "John Doe"
- }
+ "name": "John Doe"
}
----
@@ -42,6 +40,21 @@ This document is based on the `serverless-workflow-testing-with-rest-assured` ex
====
.Procedure
+
+. Check if your project has the REST Assured dependency in your `pom.xml` file. If it doesn't, add it like the following:
+
++
+--
+[source,xml]
+----
+<dependency>
+  <groupId>io.rest-assured</groupId>
+  <artifactId>rest-assured</artifactId>
+  <scope>test</scope>
+</dependency>
+----
+--
+
. Create a test class named `HelloTest` in the `src/test/java/org/kie/kogito/examples/` directory, containing the following content:
+
--
@@ -72,7 +85,7 @@ class HelloTest {
given()
.contentType(ContentType.JSON) <2>
.accept(ContentType.JSON) <3>
- .body("{\"workflowdata\": {\"name\": \"John Doe\"}}") <4>
+ .body("{\"name\": \"John Doe\"}") <4>
.when()
.post("/hello") <5>
.then()
@@ -85,7 +98,7 @@ class HelloTest {
<1> Enables logging of the request and response when the test fails.
<2> Defines JSON as the content type of the request.
<3> Specifies the `accept` header of the request. This is an alternative for `header("Accept", "application/json")`.
-<4> Defines the request body as `{"workflowdata": {"name": "John Doe"}}`.
+<4> Defines the request body as `{"name": "John Doe"}`.
<5> Specifies the request as a POST method to the `/hello` URL.
<6> Defines `201` as the expected response status code.
<7> Defines that `Hello, John Doe` is expected in the `workflowdata.greeting` JSON path.
diff --git a/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc b/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc
index 4d22f4cc..f9abf00d 100644
--- a/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc
+++ b/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-overview.adoc
@@ -41,23 +41,23 @@ Executing the previous command adds the following dependency to `pom.xml` file o
----
--
-. Enter the following command to add `kogito-addons-quarkus-process-svg` extension that provides SVG diagrams to the consoles:
+. Enter the following command to add the `kogito-addons-quarkus-source-files` extension that provides the source code to generate the Serverless Workflow diagram in the consoles:
+
--
-.Install Kogito SVG add-on extension
+.Install Kogito source files add-on extension
[source,shell]
----
-quarkus ext add org.kie.kogito:kogito-addons-quarkus-process-svg
+quarkus ext add org.kie.kogito:kogito-addons-quarkus-source-files
----
Executing the previous command adds the following dependency to `pom.xml` file of your project:
-.Process-SVG dependency in `pom.xml` file
+.Source files add-on dependency in `pom.xml` file
[source,xml]
----
 <groupId>org.kie.kogito</groupId>
-<artifactId>kogito-addons-quarkus-process-svg</artifactId>
+<artifactId>kogito-addons-quarkus-source-files</artifactId>
----
--
diff --git a/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-definition-page.adoc b/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-definition-page.adoc
index 2104aa5e..22c3593a 100644
--- a/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-definition-page.adoc
+++ b/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-definition-page.adoc
@@ -15,7 +15,7 @@ The table on the Workflow Definitions page displays the following details:
* *Endpoint*: Displays the URL to a workflow definition.
* *Actions*: Provides a button to start a new workflow.
-The filters on the Workflow Definitions page enables you to add filters to the table. To search for a specific workflow definition, enter the name of the workflow in the *Filter by workflow name* field and click *Apply Filter* button. The matching workflow definitions appear as a chip below the search field.
+The filters on the Workflow Definitions page enable you to add filters to the table. To search for a specific workflow definition, enter the name of the workflow in the *Filter by workflow name* field and click the *Apply Filter* button. The matching workflow definitions appear as a chip below the search field.
To clear the applied filters, you can click the *Reset to default* button. Also, to fetch newly added workflow definitions, click on the refresh icon next to the *Apply Filter* button.
@@ -23,28 +23,32 @@ To clear the applied filters, you can click the *Reset to default* button. Also,
image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-definitions-filter.png[]
[[con-trigger-cloud-event-page]]
-== Trigger cloud event page
+== Start New Workflow page
-The Trigger cloud event page is used to trigger the cloud events related to a workflow definition. To navigate to the *Trigger cloud event* page, click on the play button in the *Actions* column of workflow definitions table.
+The *Start New Workflow* page is used to start a workflow instance through a cloud event or a custom form, depending on the workflow configuration. To navigate to the *Start New Workflow* page, click the play button in the *Actions* column of the workflow definitions table.
-.Trigger cloud event page
+If there is no JSON schema for the workflow, then the workflow is started by triggering a cloud event.
+To trigger a cloud event, you can use the *Cloud Event Type* and *Cloud Event Data* input fields to set the type of a cloud event and event payload in JSON format respectively and click the *Start* button to start a workflow. The *Reset* button on the page resets the values entered in the given fields.
+
+.Starting a workflow using a cloud event
image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events.png[]
-The *Trigger cloud event* page consists of the two fields, including *Type* and *Data*. To trigger a cloud event, you can use the *Type* and *Data* input fields to set the type of a cloud event and event payload in JSON format respectively, and click the *Send* button to start a workflow. The *Reset* button on the page resets the values entered in the given fields.
+If there is a JSON schema for the workflow configured in the `dataInputSchema` property, then a form is displayed to start a new workflow instance. You can fill in the required form details and click the *Start* button to trigger the workflow.
+The *Reset* button is used to clear the form data.
-.Example values to trigger a workflow
-image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-values.png[]
+.Starting a workflow using the form
+image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-events-custom-form.png[]
-You can also use the *Business key* text box to define a custom business key value to the workflow instance. If the *Business Key* field is blank, then an auto-generated business key is defined to the workflow instance.
+You can also use the *Business key* text box to define a custom business key value to the workflow instance. If the *Business Key* field is blank, then an auto-generated business key is defined for the workflow instance.
When a workflow instance starts successfully, a success alert appears on the top of the screen, which contains a *Go to workflow list* option. The *Go to workflow list* option enables you to navigate to the xref:serverless-logic:tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-instances-page.adoc[Workflow Instances page].
.Example of trigger workflow success alert
-image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-success-alert.png[]
+image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-success-alert.png[]
-If incorrect values are entered in the *Type* and *Data* fields, then a failure alert appears on the top of the screen, containing *View Details* and *Go to workflow list* options. The *View Details* enables you to view the error message.
+If there is an issue while starting a workflow, then a failure alert appears on the top of the screen, containing *View Details* and *Go to workflow list* options. The *View Details* enables you to view the error message.
.Example of trigger workflow failure alert
-image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-trigger-cloud-event-fail-alert.png[]
+image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-start-workflow-fail-alert.png[]
-include::../../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
+include::../../../pages/_common-content/report-issue.adoc[]
diff --git a/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-instances-page.adoc b/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-instances-page.adoc
index 7ccb6ed2..bf74e1d1 100644
--- a/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-instances-page.adoc
+++ b/modules/serverless-logic/pages/tooling/quarkus-dev-ui-extension/quarkus-dev-ui-workflow-instances-page.adoc
@@ -39,31 +39,46 @@ image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-details-page.p
The Workflow Details page consists of the following panels:
-* Diagram panel
+* Serverless Workflow Diagram panel
* Timeline panel
* Details panel
* Variables panel
-Diagram panel::
+Serverless Workflow Diagram panel::
+
--
-The Diagram panel enables you to explore the workflow diagram and execution path of the workflow instance. The workflow diagram and execution path are displayed when the Kogito process SVG configuration is added in your project.
+The Serverless Workflow Diagram panel enables you to explore the workflow diagram and execution path of the workflow instance. The workflow diagram and execution path are displayed by consuming the source files exposed through the `kogito-addons-quarkus-source-files` add-on.
-.Diagram panel
-image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-diagram-panel.png[]
+To add the source files add-on configuration, add the following dependency to `pom.xml` file of your project:
-To add the Kogito SVG process add-on configuration, add the following dependency to `pom.xml` file of your project:
-
-.Process-SVG dependency in `pom.xml` file
+.Source-files add-on dependency in `pom.xml` file
[source,xml]
----
 <groupId>org.kie.kogito</groupId>
-<artifactId>kogito-addons-quarkus-process-svg</artifactId>
+<artifactId>kogito-addons-quarkus-source-files</artifactId>
----
-You can also generate SVG file using the VS Code extension for Serverless Workflow editor and then place the generated SVG in the `src/main/resources/META-INF/processSVG` folder of your project. The Kogito SVG process add-on exposes the SVG diagram to Quarkus Dev UI, and Dev UI reads the diagram and provides the workflow *Diagram* panel.
+There are two ways to display the diagram.
+
+1. Stunner Diagram
+2. Mermaid Diagram
+
+By default, the Stunner diagram is displayed. To toggle between the two diagrams, you can use the `kogito.swf.stunner.enabled` (defaulted to `true`) environment variable in your application properties.
+
+[NOTE]
+====
+For YAML-based workflow files, the Mermaid diagram is displayed instead of the Stunner diagram.
+====
+
+There is a slider available in the diagram panel, which, when dragged to the right, displays the source code in read-only mode.
+
+.Stunner based Diagram panel
+image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-stunner-diagram-panel.png[]
+
+.Mermaid based Diagram panel
+image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-mermaid-diagram-panel.png[]
--
Timeline panel::
@@ -101,4 +116,4 @@ The Variables panel displays the data of a workflow in the form of JSON.
image::tooling/quarkus-dev-ui-extension/kogito-swf-tools-workflow-variables-panel.png[]
--
-include::../../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
+include::../../../pages/_common-content/report-issue.adoc[]
diff --git a/modules/serverless-logic/pages/use-cases/orchestration-based-saga-pattern.adoc b/modules/serverless-logic/pages/use-cases/orchestration-based-saga-pattern.adoc
index daf0fe5c..23f3a083 100644
--- a/modules/serverless-logic/pages/use-cases/orchestration-based-saga-pattern.adoc
+++ b/modules/serverless-logic/pages/use-cases/orchestration-based-saga-pattern.adoc
@@ -8,16 +8,16 @@
[[con-saga-overview]]
== Overview of Saga pattern
-The Saga design pattern manages data consistency across participants that are available in distributed transaction scenarios. For more information about Saga pattern, see the initial link:https://www.cs.cornell.edu/andru/cs711/2002fa/reading/sagas.pdf[publication].
+The Saga design pattern manages data consistency across participants that are available in distributed transaction scenarios. For more information about Saga pattern, see the initial link:https://www.cs.cornell.edu/andru/cs711/2002fa/reading/sagas.pdf[publication].
-In a microservice architecture, you can define a participant as microservice, which is responsible to perform actions related to the business domain.
+In a microservice architecture, you can define a participant as microservice, which is responsible to perform actions related to the business domain.
The Saga pattern manages the transactions using a sequence of steps. If a failure occurs while executing a step, then a sequence of compensating actions is executed to undo the changes that are made during the execution. As an alternative, you can leave the system in a known termination state to be consistent.
[[ref-sw-example-saga-pattern]]
== Example of Saga pattern a workflow
-To understand the implementation of Saga pattern in a workflow, you can use the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus[`serverless-workflow-saga-quarkus`] example application in GitHub repository.
+To understand the implementation of Saga pattern in a workflow, you can use the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus[`serverless-workflow-saga-quarkus`] example application in GitHub repository.
The `serverless-workflow-saga-quarkus` example application is based on the order fulfillment process and describes how to define Saga pattern using {context}. In the order fulfillment example, a user buys an item from an e-commerce application. The user adds the delivery information and payment details, and waits for the item to be delivered. The following figure shows the sequence of steps that are executed to complete an order:
@@ -46,14 +46,14 @@ However, in the failure workflow an error occurred during the shipping process.
In the `serverless-workflow-saga-quarkus` example application, a link:{spec_doc_url}[workflow] is used that implements the Saga pattern, in which all the steps and compensation actions are defined. Also, the workflow plays the role of Saga Executor Coordinator (SEC), which orchestrates the calls to the participants in the Saga pattern.
-The workflow definition used to define the Saga pattern is available in the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus/src/main/resources/order-saga-error-handling.sw.json[`order-saga-error-handling.sw.json`] file.
+The workflow definition used to define the Saga pattern is available in the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus/src/main/resources/order-saga-error-handling.sw.json[`order-saga-error-handling.sw.json`] file.
.Example of order fulfillment Saga workflow
image::use-cases/orchestration-based-saga-pattern/order-fulfillment-saga-workflow.png[]
In the previous example figure of the workflow, the calls to the participants (for example, order service and payment service) are orchestrated, each participant can throw errors, and compensations are defined for each step, which are executed when an error occurs during the workflow execution.
-To define the interactions among participants in the Saga pattern using Serverless Workflow specification, you can use link:{spec_doc_url}#workflow-states[workflow states] with link:{spec_doc_url}#Transitions[transitions].
+To define the interactions among participants in the Saga pattern using Serverless Workflow specification, you can use link:{spec_doc_url}#workflow-states[workflow states] with link:{spec_doc_url}#Transitions[transitions].
In Serverless Workflow specification, each workflow state represents a step to be completed in the Saga pattern. Also, an action associated with the workflow state represents how a participant is invoked to execute a given step.
@@ -108,7 +108,7 @@ In {context} each workflow state must define a compensation action using `compen
Errors::
+
--
-In {context} errors are identified by a name and can be associated with a workflow state. For example, a `process payment failed` error is associated with the `processPayment` state.
+In {context} errors are identified by a name and can be associated with a workflow state. For example, a `process payment failed` error is associated with the `processPayment` state.
Following is an example of error declaration in the workflow definition:
@@ -134,9 +134,9 @@ Following is an example of error declaration in the workflow definition:
}
----
-Once an error occurs during the workflow execution, the associated compensation action is triggered.
+Once an error occurs during the workflow execution, the associated compensation action is triggered.
-An error definition uses the fully qualified class name (FQCN) for Java exceptions that are thrown by functions. In the previous example of error definition, `org.kie.kogito.ServiceException` is thrown by the service calls that are defined as <<#custom-function, Java methods>> in the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus/src/main/java/org/kie/kogito/PaymentService.java[`PaymentService.java`] file.
+An error definition uses the fully qualified class name (FQCN) for Java exceptions that are thrown by functions. In the previous example of error definition, `org.kie.kogito.ServiceException` is thrown by the service calls that are defined as <<#custom-function, Java methods>> in the link:{kogito_sw_examples_url}/serverless-workflow-saga-quarkus/src/main/java/org/kie/kogito/PaymentService.java[`PaymentService.java`] file.
[#custom-function]
.Example custom function using a Java class and method
@@ -152,7 +152,7 @@ An error definition uses the fully qualified class name (FQCN) for Java exceptio
The functions that throw errors can be of any type, such as REST, OpenAPI, or gRPC. For information about error handling, see xref:serverless-logic:core/understanding-workflow-error-handling.adoc[Error handling in {context}].
--
-The workflow engine controls the execution of the flow and keeps the track of the steps that need to be compensated. Also, the engine ensures that compensated states are executed in reverse order of each completed step.
+The workflow engine controls the execution of the flow and keeps track of the steps that need to be compensated. Also, the engine ensures that compensated states are executed in reverse order of each completed step.
The engine is stateful, allowing the Saga to contain wait states, such as callbacks. After each wait state, the workflow is persisted and can continue once it receives a request or event.
@@ -172,9 +172,7 @@ You can use the following example to send a request for creating an order:
[source,shell]
----
curl -L -X POST "http://localhost:8080/order_saga_error_workflow" -H 'Content-Type: application/json' --data-raw '{
- "workflowdata": {
"orderId": "03e6cf79-3301-434b-b5e1-d6899b5639aa"
- }
}'
----
@@ -205,7 +203,7 @@ curl -L -X POST "http://localhost:8080/order_saga_error_workflow" -H 'Content-Ty
}
----
-The response contains the workflow data with nested attributes, which represent the responses from the execution of each step including success or failure.
+The response contains the workflow data with nested attributes, which represent the responses from the execution of each step including success or failure.
In the previous example, the `orderResponse` attribute indicates if the order can be confirmed by the client by initiating the Saga workflow. Therefore, if the value of the `orderResponse` attribute is `success`, then the order can be confirmed, otherwise the order can be canceled.
@@ -230,10 +228,8 @@ To test the workflow, an optional `failService` attribute is introduced, indicat
[source,shell]
----
curl -L -X POST 'http://localhost:8080/order_saga_error_workflow' -H 'Content-Type: application/json' --data-raw '{
- "workflowdata": {
"orderId": "03e6cf79-3301-434b-b5e1-d6899b5639aa",
"failService": "ShippingService"
- }
}'
----
@@ -288,4 +284,4 @@ When executing the application, you can also verify the log with information rel
* xref:serverless-logic:core/understanding-workflow-error-handling.adoc[Error handling in {context}]
-include::../../pages/_common-content/report-issue.adoc[]
\ No newline at end of file
+include::../../pages/_common-content/report-issue.adoc[]