diff --git a/docs/antora-playbook.yml b/docs/antora-playbook.yml index 1b3c01eb2..5fc02704f 100644 --- a/docs/antora-playbook.yml +++ b/docs/antora-playbook.yml @@ -10,3 +10,7 @@ content: asciidoc: extensions: - "@djencks/asciidoctor-antora-indexer" +ui: + bundle: + url: https://gitlab.com/antora/antora-ui-default/-/jobs/artifacts/master/raw/build/ui-bundle.zip?job=bundle-stable + snapshot: true diff --git a/docs/modules/ROOT/kamelet.adoc.tmpl b/docs/modules/ROOT/kamelet.adoc.tmpl new file mode 100644 index 000000000..260e46be6 --- /dev/null +++ b/docs/modules/ROOT/kamelet.adoc.tmpl @@ -0,0 +1,83 @@ +// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT += {{ .Image }} {{ .Kamelet.Spec.Definition.Title }} + +*Provided by: "{{ index .Kamelet.ObjectMeta.Annotations "camel.apache.org/provider" }}"* + +*Support Level for this Kamelet is: "{{ index .Kamelet.ObjectMeta.Annotations "camel.apache.org/kamelet.support.level" }}"* + +{{ .Kamelet.Spec.Definition.Description }} + +== Configuration Options +{{ if .HasProperties }} +The following table summarizes the configuration options available for the `{{ .Kamelet.ObjectMeta.Name }}` Kamelet: +{{ .Properties }} +NOTE: Fields marked with ({empty}*) are mandatory. +{{ else }} +The Kamelet does not specify any configuration option. +{{ end }} +== Usage + +This section summarizes how the `{{ .Kamelet.ObjectMeta.Name }}` can be used in various contexts. + +=== Knative {{ index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type" | ToCamel }} + +{{ if eq (index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type") "action" -}} +The `{{ .Kamelet.ObjectMeta.Name }}` Kamelet can be used as intermediate step in a Knative binding. +{{ else -}} +The `{{ .Kamelet.ObjectMeta.Name }}` Kamelet can be used as Knative {{ index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type" }} by binding it to a Knative object. 
+{{ end }} +{{ .ExampleBinding "messaging.knative.dev/v1" "InMemoryChannel" "mychannel" }} +Make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `{{ .Kamelet.ObjectMeta.Name }}-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the {{ index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type" }} using the following command: + +[source,shell] +---- +kubectl apply -f {{ .Kamelet.ObjectMeta.Name }}-binding.yaml +---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +{{ .ExampleKamelBindCommand "channel/mychannel" }} +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka {{ index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type" | ToCamel }} + +{{ if eq (index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type") "action" -}} +The `{{ .Kamelet.ObjectMeta.Name }}` Kamelet can be used as intermediate step in a Kafka binding. +{{ else -}} +The `{{ .Kamelet.ObjectMeta.Name }}` Kamelet can be used as Kafka {{ index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type" }} by binding it to a Kafka topic. +{{ end }} +{{ .ExampleBinding "kafka.strimzi.io/v1beta1" "KafkaTopic" "my-topic" }} +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Also make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `{{ .Kamelet.ObjectMeta.Name }}-binding.yaml` file into your hard drive, then configure it according to your needs.
+ +You can run the {{ index .Kamelet.ObjectMeta.Labels "camel.apache.org/kamelet.type" }} using the following command: + +[source,shell] +---- +kubectl apply -f {{ .Kamelet.ObjectMeta.Name }}-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +{{ .ExampleKamelBindCommand "kafka.strimzi.io/v1beta1:KafkaTopic:my-topic" }} +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/avro-deserialize-action.adoc b/docs/modules/ROOT/pages/avro-deserialize-action.adoc index 077df053b..5ed8b7673 100644 --- a/docs/modules/ROOT/pages/avro-deserialize-action.adoc +++ b/docs/modules/ROOT/pages/avro-deserialize-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `avro-deserialize-action` can be used in various === Knative Action -The `avro-deserialize-action` Kamelet can be used as intermediate step in a binding. +The `avro-deserialize-action` Kamelet can be used as intermediate step in a Knative binding. .avro-deserialize-action-binding.yaml [source,yaml] @@ -67,4 +67,73 @@ You can run the action using the following command: ---- kubectl apply -f avro-deserialize-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step avro-deserialize-action -p "step-0.schema={\"type\": \"record\", \"namespace\": \"com.example\", \"name\": \"FullName\", \"fields\": [{\"name\": \"first\", \"type\": \"string\"},{\"name\": \"last\", \"type\": \"string\"}]}" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Action + +The `avro-deserialize-action` Kamelet can be used as intermediate step in a Kafka binding. + +.avro-deserialize-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: avro-deserialize-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: avro-deserialize-action + properties: + schema: "{\"type\": \"record\", \"namespace\": \"com.example\", \"name\": \"FullName\", \"fields\": [{\"name\": \"first\", \"type\": \"string\"},{\"name\": \"last\", \"type\": \"string\"}]}" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `avro-deserialize-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f avro-deserialize-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step avro-deserialize-action -p "step-0.schema={\"type\": \"record\", \"namespace\": \"com.example\", \"name\": \"FullName\", \"fields\": [{\"name\": \"first\", \"type\": \"string\"},{\"name\": \"last\", \"type\": \"string\"}]}" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/avro-serialize-action.adoc b/docs/modules/ROOT/pages/avro-serialize-action.adoc index 9e0bbcc6f..8f56cadb2 100644 --- a/docs/modules/ROOT/pages/avro-serialize-action.adoc +++ b/docs/modules/ROOT/pages/avro-serialize-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `avro-serialize-action` can be used in various c === Knative Action -The `avro-serialize-action` Kamelet can be used as intermediate step in a binding. +The `avro-serialize-action` Kamelet can be used as intermediate step in a Knative binding. .avro-serialize-action-binding.yaml [source,yaml] @@ -67,4 +67,73 @@ You can run the action using the following command: ---- kubectl apply -f avro-serialize-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step avro-serialize-action -p "step-0.schema={\"type\": \"record\", \"namespace\": \"com.example\", \"name\": \"FullName\", \"fields\": [{\"name\": \"first\", \"type\": \"string\"},{\"name\": \"last\", \"type\": \"string\"}]}" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `avro-serialize-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.avro-serialize-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: avro-serialize-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: avro-serialize-action + properties: + schema: "{\"type\": \"record\", \"namespace\": \"com.example\", \"name\": \"FullName\", \"fields\": [{\"name\": \"first\", \"type\": \"string\"},{\"name\": \"last\", \"type\": \"string\"}]}" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `avro-serialize-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f avro-serialize-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step avro-serialize-action -p "step-0.schema={\"type\": \"record\", \"namespace\": \"com.example\", \"name\": \"FullName\", \"fields\": [{\"name\": \"first\", \"type\": \"string\"},{\"name\": \"last\", \"type\": \"string\"}]}" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-cloudwatch-sink.adoc b/docs/modules/ROOT/pages/aws-cloudwatch-sink.adoc index 33f3b4ef6..badfdc736 100644 --- a/docs/modules/ROOT/pages/aws-cloudwatch-sink.adoc +++ b/docs/modules/ROOT/pages/aws-cloudwatch-sink.adoc @@ -74,4 +74,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-cloudwatch-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-cloudwatch-sink -p "sink.accessKey=The Access Key" -p "sink.cw_namespace=The Cloud Watch Namespace" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-cloudwatch-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.aws-cloudwatch-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-cloudwatch-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-cloudwatch-sink + properties: + accessKey: "The Access Key" + cw_namespace: "The Cloud Watch Namespace" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-cloudwatch-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-cloudwatch-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-cloudwatch-sink -p "sink.accessKey=The Access Key" -p "sink.cw_namespace=The Cloud Watch Namespace" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-ddb-streams-source.adoc b/docs/modules/ROOT/pages/aws-ddb-streams-source.adoc index b8ad25a86..3151ee27d 100644 --- a/docs/modules/ROOT/pages/aws-ddb-streams-source.adoc +++ b/docs/modules/ROOT/pages/aws-ddb-streams-source.adoc @@ -67,4 +67,69 @@ You can run the source using the following command: ---- kubectl apply -f aws-ddb-streams-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-ddb-streams-source -p "source.accessKey=The Access Key" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" -p "source.table=The Table" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `aws-ddb-streams-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.aws-ddb-streams-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-ddb-streams-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-ddb-streams-source + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + table: "The Table" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-ddb-streams-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f aws-ddb-streams-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-ddb-streams-source -p "source.accessKey=The Access Key" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" -p "source.table=The Table" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-kinesis-firehose-sink.adoc b/docs/modules/ROOT/pages/aws-kinesis-firehose-sink.adoc index 99af57fed..3d7608354 100644 --- a/docs/modules/ROOT/pages/aws-kinesis-firehose-sink.adoc +++ b/docs/modules/ROOT/pages/aws-kinesis-firehose-sink.adoc @@ -65,4 +65,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-kinesis-firehose-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-kinesis-firehose-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.streamName=The Stream Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-kinesis-firehose-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.aws-kinesis-firehose-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-kinesis-firehose-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-kinesis-firehose-sink + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + streamName: "The Stream Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-kinesis-firehose-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-kinesis-firehose-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-kinesis-firehose-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.streamName=The Stream Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-kinesis-sink.adoc b/docs/modules/ROOT/pages/aws-kinesis-sink.adoc index ddcd98abf..9ec529cba 100644 --- a/docs/modules/ROOT/pages/aws-kinesis-sink.adoc +++ b/docs/modules/ROOT/pages/aws-kinesis-sink.adoc @@ -77,4 +77,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-kinesis-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-kinesis-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.stream=The Stream Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-kinesis-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.aws-kinesis-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-kinesis-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-kinesis-sink + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + stream: "The Stream Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-kinesis-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-kinesis-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-kinesis-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.stream=The Stream Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-kinesis-source.adoc b/docs/modules/ROOT/pages/aws-kinesis-source.adoc index aeebf92c0..f48bf19ab 100644 --- a/docs/modules/ROOT/pages/aws-kinesis-source.adoc +++ b/docs/modules/ROOT/pages/aws-kinesis-source.adoc @@ -65,4 +65,69 @@ You can run the source using the following command: ---- kubectl apply -f aws-kinesis-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-kinesis-source -p "source.accessKey=The Access Key" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" -p "source.stream=The Stream Name" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `aws-kinesis-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.aws-kinesis-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-kinesis-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-kinesis-source + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + stream: "The Stream Name" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-kinesis-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f aws-kinesis-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-kinesis-source -p "source.accessKey=The Access Key" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" -p "source.stream=The Stream Name" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-lambda-sink.adoc b/docs/modules/ROOT/pages/aws-lambda-sink.adoc index 009ab369e..123458a48 100644 --- a/docs/modules/ROOT/pages/aws-lambda-sink.adoc +++ b/docs/modules/ROOT/pages/aws-lambda-sink.adoc @@ -65,4 +65,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-lambda-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-lambda-sink -p "sink.accessKey=The Access Key" -p "sink.function=The Function Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-lambda-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.aws-lambda-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-lambda-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-lambda-sink + properties: + accessKey: "The Access Key" + function: "The Function Name" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-lambda-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-lambda-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-lambda-sink -p "sink.accessKey=The Access Key" -p "sink.function=The Function Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-s3-sink.adoc b/docs/modules/ROOT/pages/aws-s3-sink.adoc index b7627de73..8495c5ac3 100644 --- a/docs/modules/ROOT/pages/aws-s3-sink.adoc +++ b/docs/modules/ROOT/pages/aws-s3-sink.adoc @@ -72,4 +72,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-s3-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-s3-sink -p "sink.accessKey=The Access Key" -p "sink.bucketNameOrArn=The Bucket Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-s3-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.aws-s3-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-s3-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-s3-sink + properties: + accessKey: "The Access Key" + bucketNameOrArn: "The Bucket Name" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-s3-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-s3-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-s3-sink -p "sink.accessKey=The Access Key" -p "sink.bucketNameOrArn=The Bucket Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-s3-source.adoc b/docs/modules/ROOT/pages/aws-s3-source.adoc index aa43bac3d..1f2cc01f7 100644 --- a/docs/modules/ROOT/pages/aws-s3-source.adoc +++ b/docs/modules/ROOT/pages/aws-s3-source.adoc @@ -68,4 +68,69 @@ You can run the source using the following command: ---- kubectl apply -f aws-s3-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-s3-source -p "source.accessKey=The Access Key" -p "source.bucketNameOrArn=The Bucket Name" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `aws-s3-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.aws-s3-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-s3-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-s3-source + properties: + accessKey: "The Access Key" + bucketNameOrArn: "The Bucket Name" + region: "eu-west-1" + secretKey: "The Secret Key" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-s3-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f aws-s3-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-s3-source -p "source.accessKey=The Access Key" -p "source.bucketNameOrArn=The Bucket Name" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-s3-streaming-upload-sink.adoc b/docs/modules/ROOT/pages/aws-s3-streaming-upload-sink.adoc index 51de79119..0b94ce295 100644 --- a/docs/modules/ROOT/pages/aws-s3-streaming-upload-sink.adoc +++ b/docs/modules/ROOT/pages/aws-s3-streaming-upload-sink.adoc @@ -73,4 +73,70 @@ You can run the sink using the following command: ---- kubectl apply -f aws-s3-streaming-upload-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-s3-streaming-upload-sink -p "sink.accessKey=The Access Key" -p "sink.bucketNameOrArn=The Bucket Name" -p "sink.keyName=The Key Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-s3-streaming-upload-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.aws-s3-streaming-upload-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-s3-streaming-upload-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-s3-streaming-upload-sink + properties: + accessKey: "The Access Key" + bucketNameOrArn: "The Bucket Name" + keyName: "The Key Name" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `aws-s3-streaming-upload-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-s3-streaming-upload-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-s3-streaming-upload-sink -p "sink.accessKey=The Access Key" -p "sink.bucketNameOrArn=The Bucket Name" -p "sink.keyName=The Key Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-sns-fifo-sink.adoc b/docs/modules/ROOT/pages/aws-sns-fifo-sink.adoc index 3b30769f6..ccec18442 100644 --- a/docs/modules/ROOT/pages/aws-sns-fifo-sink.adoc +++ b/docs/modules/ROOT/pages/aws-sns-fifo-sink.adoc @@ -67,4 +67,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-sns-fifo-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-sns-fifo-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.topicNameOrArn=The Topic Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-sns-fifo-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.aws-sns-fifo-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-sns-fifo-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-sns-fifo-sink + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + topicNameOrArn: "The Topic Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-sns-fifo-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-sns-fifo-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-sns-fifo-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.topicNameOrArn=The Topic Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-sns-sink.adoc b/docs/modules/ROOT/pages/aws-sns-sink.adoc index 138ded214..48e312229 100644 --- a/docs/modules/ROOT/pages/aws-sns-sink.adoc +++ b/docs/modules/ROOT/pages/aws-sns-sink.adoc @@ -66,4 +66,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-sns-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-sns-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.topicNameOrArn=The Topic Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-sns-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.aws-sns-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-sns-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-sns-sink + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + topicNameOrArn: "The Topic Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-sns-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-sns-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-sns-sink -p "sink.accessKey=The Access Key" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" -p "sink.topicNameOrArn=The Topic Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-sqs-batch-sink.adoc b/docs/modules/ROOT/pages/aws-sqs-batch-sink.adoc index aa76be38b..53277e6d0 100644 --- a/docs/modules/ROOT/pages/aws-sqs-batch-sink.adoc +++ b/docs/modules/ROOT/pages/aws-sqs-batch-sink.adoc @@ -68,4 +68,70 @@ You can run the sink using the following command: ---- kubectl apply -f aws-sqs-batch-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-sqs-batch-sink -p "sink.accessKey=The Access Key" -p "sink.batchSeparator=," -p "sink.queueNameOrArn=The Queue Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-sqs-batch-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.aws-sqs-batch-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-sqs-batch-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-sqs-batch-sink + properties: + accessKey: "The Access Key" + batchSeparator: "," + queueNameOrArn: "The Queue Name" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-sqs-batch-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-sqs-batch-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-sqs-batch-sink -p "sink.accessKey=The Access Key" -p "sink.batchSeparator=," -p "sink.queueNameOrArn=The Queue Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-sqs-fifo-sink.adoc b/docs/modules/ROOT/pages/aws-sqs-fifo-sink.adoc index 4a0e02cc6..7a1bc33a1 100644 --- a/docs/modules/ROOT/pages/aws-sqs-fifo-sink.adoc +++ b/docs/modules/ROOT/pages/aws-sqs-fifo-sink.adoc @@ -67,4 +67,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-sqs-fifo-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-sqs-fifo-sink -p "sink.accessKey=The Access Key" -p "sink.queueNameOrArn=The Queue Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-sqs-fifo-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.aws-sqs-fifo-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-sqs-fifo-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-sqs-fifo-sink + properties: + accessKey: "The Access Key" + queueNameOrArn: "The Queue Name" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-sqs-fifo-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-sqs-fifo-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-sqs-fifo-sink -p "sink.accessKey=The Access Key" -p "sink.queueNameOrArn=The Queue Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-sqs-sink.adoc b/docs/modules/ROOT/pages/aws-sqs-sink.adoc index bdc702691..4a3ed7b60 100644 --- a/docs/modules/ROOT/pages/aws-sqs-sink.adoc +++ b/docs/modules/ROOT/pages/aws-sqs-sink.adoc @@ -66,4 +66,69 @@ You can run the sink using the following command: ---- kubectl apply -f aws-sqs-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel aws-sqs-sink -p "sink.accessKey=The Access Key" -p "sink.queueNameOrArn=The Queue Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `aws-sqs-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.aws-sqs-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-sqs-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-sqs-sink + properties: + accessKey: "The Access Key" + queueNameOrArn: "The Queue Name" + region: "eu-west-1" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-sqs-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f aws-sqs-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic aws-sqs-sink -p "sink.accessKey=The Access Key" -p "sink.queueNameOrArn=The Queue Name" -p "sink.region=eu-west-1" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-sqs-source.adoc b/docs/modules/ROOT/pages/aws-sqs-source.adoc index 99d6a95c1..51177f5f6 100644 --- a/docs/modules/ROOT/pages/aws-sqs-source.adoc +++ b/docs/modules/ROOT/pages/aws-sqs-source.adoc @@ -67,4 +67,69 @@ You can run the source using the following command: ---- kubectl apply -f aws-sqs-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-sqs-source -p "source.accessKey=The Access Key" -p "source.queueNameOrArn=The Queue Name" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `aws-sqs-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.aws-sqs-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-sqs-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-sqs-source + properties: + accessKey: "The Access Key" + queueNameOrArn: "The Queue Name" + region: "eu-west-1" + secretKey: "The Secret Key" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-sqs-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f aws-sqs-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind aws-sqs-source -p "source.accessKey=The Access Key" -p "source.queueNameOrArn=The Queue Name" -p "source.region=eu-west-1" -p "source.secretKey=The Secret Key" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/aws-translate-action.adoc b/docs/modules/ROOT/pages/aws-translate-action.adoc index b5abbd37d..3abe5a895 100644 --- a/docs/modules/ROOT/pages/aws-translate-action.adoc +++ b/docs/modules/ROOT/pages/aws-translate-action.adoc @@ -28,7 +28,7 @@ This section summarizes how the `aws-translate-action` can be used in various co === Knative Action -The `aws-translate-action` Kamelet can be used as intermediate step in a binding. +The `aws-translate-action` Kamelet can be used as intermediate step in a Knative binding. .aws-translate-action-binding.yaml [source,yaml] @@ -74,4 +74,77 @@ You can run the action using the following command: ---- kubectl apply -f aws-translate-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step aws-translate-action -p "step-0.accessKey=The Access Key" -p "step-0.region=eu-west-1" -p "step-0.secretKey=The Secret Key" -p "step-0.sourceLanguage=it" -p "step-0.targetLanguage=en" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Action + +The `aws-translate-action` Kamelet can be used as intermediate step in a Kafka binding. + +.aws-translate-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-translate-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-translate-action + properties: + accessKey: "The Access Key" + region: "eu-west-1" + secretKey: "The Secret Key" + sourceLanguage: "it" + targetLanguage: "en" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `aws-translate-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f aws-translate-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step aws-translate-action -p "step-0.accessKey=The Access Key" -p "step-0.region=eu-west-1" -p "step-0.secretKey=The Secret Key" -p "step-0.sourceLanguage=it" -p "step-0.targetLanguage=en" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/azure-eventhubs-sink.adoc b/docs/modules/ROOT/pages/azure-eventhubs-sink.adoc index c588df720..6a330fe6e 100644 --- a/docs/modules/ROOT/pages/azure-eventhubs-sink.adoc +++ b/docs/modules/ROOT/pages/azure-eventhubs-sink.adoc @@ -71,4 +71,69 @@ You can run the sink using the following command: ---- kubectl apply -f azure-eventhubs-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel azure-eventhubs-sink -p "sink.eventhubName=The Eventhubs Name" -p "sink.namespaceName=The Eventhubs Namespace" -p "sink.sharedAccessKey=The Share Access Key" -p "sink.sharedAccessName=The Share Access Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `azure-eventhubs-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.azure-eventhubs-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: azure-eventhubs-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: azure-eventhubs-sink + properties: + eventhubName: "The Eventhubs Name" + namespaceName: "The Eventhubs Namespace" + sharedAccessKey: "The Share Access Key" + sharedAccessName: "The Share Access Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `azure-eventhubs-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f azure-eventhubs-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic azure-eventhubs-sink -p "sink.eventhubName=The Eventhubs Name" -p "sink.namespaceName=The Eventhubs Namespace" -p "sink.sharedAccessKey=The Share Access Key" -p "sink.sharedAccessName=The Share Access Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/azure-eventhubs-source.adoc b/docs/modules/ROOT/pages/azure-eventhubs-source.adoc index 18cbd07df..cdcd70681 100644 --- a/docs/modules/ROOT/pages/azure-eventhubs-source.adoc +++ b/docs/modules/ROOT/pages/azure-eventhubs-source.adoc @@ -71,4 +71,72 @@ You can run the source using the following command: ---- kubectl apply -f azure-eventhubs-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind azure-eventhubs-source -p "source.blobAccessKey=The Azure Storage Blob Access Key" -p "source.blobAccountName=The Azure Storage Blob Account Name" -p "source.blobContainerName=The Azure Storage Blob Container Name" -p "source.eventhubName=The Eventhubs Name" -p "source.namespaceName=The Eventhubs Namespace" -p "source.sharedAccessKey=The Share Access Key" -p "source.sharedAccessName=The Share Access Name" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Source + +The `azure-eventhubs-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.azure-eventhubs-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: azure-eventhubs-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: azure-eventhubs-source + properties: + blobAccessKey: "The Azure Storage Blob Access Key" + blobAccountName: "The Azure Storage Blob Account Name" + blobContainerName: "The Azure Storage Blob Container Name" + eventhubName: "The Eventhubs Name" + namespaceName: "The Eventhubs Namespace" + sharedAccessKey: "The Share Access Key" + sharedAccessName: "The Share Access Name" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `azure-eventhubs-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f azure-eventhubs-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind azure-eventhubs-source -p "source.blobAccessKey=The Azure Storage Blob Access Key" -p "source.blobAccountName=The Azure Storage Blob Account Name" -p "source.blobContainerName=The Azure Storage Blob Container Name" -p "source.eventhubName=The Eventhubs Name" -p "source.namespaceName=The Eventhubs Namespace" -p "source.sharedAccessKey=The Share Access Key" -p "source.sharedAccessName=The Share Access Name" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/azure-storage-blob-sink.adoc b/docs/modules/ROOT/pages/azure-storage-blob-sink.adoc index c901b4167..2b5c76a6d 100644 --- a/docs/modules/ROOT/pages/azure-storage-blob-sink.adoc +++ b/docs/modules/ROOT/pages/azure-storage-blob-sink.adoc @@ -70,4 +70,68 @@ You can run the sink using the following command: ---- kubectl apply -f azure-storage-blob-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel azure-storage-blob-sink -p "sink.accessKey=The Access Key" -p "sink.accountName=The Account Name" -p "sink.containerName=The Container Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `azure-storage-blob-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.azure-storage-blob-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: azure-storage-blob-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: azure-storage-blob-sink + properties: + accessKey: "The Access Key" + accountName: "The Account Name" + containerName: "The Container Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `azure-storage-blob-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f azure-storage-blob-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic azure-storage-blob-sink -p "sink.accessKey=The Access Key" -p "sink.accountName=The Account Name" -p "sink.containerName=The Container Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/azure-storage-blob-source.adoc b/docs/modules/ROOT/pages/azure-storage-blob-source.adoc index 7a6d35c25..9a198d4a9 100644 --- a/docs/modules/ROOT/pages/azure-storage-blob-source.adoc +++ b/docs/modules/ROOT/pages/azure-storage-blob-source.adoc @@ -64,4 +64,68 @@ You can run the source using the following command: ---- kubectl apply -f azure-storage-blob-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind azure-storage-blob-source -p "source.accessKey=The Access Key" -p "source.accountName=The Account Name" -p "source.containerName=The Container Name" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `azure-storage-blob-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.azure-storage-blob-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: azure-storage-blob-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: azure-storage-blob-source + properties: + accessKey: "The Access Key" + accountName: "The Account Name" + containerName: "The Container Name" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `azure-storage-blob-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f azure-storage-blob-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind azure-storage-blob-source -p "source.accessKey=The Access Key" -p "source.accountName=The Account Name" -p "source.containerName=The Container Name" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/azure-storage-queue-sink.adoc b/docs/modules/ROOT/pages/azure-storage-queue-sink.adoc index 5e3092fed..7def719ac 100644 --- a/docs/modules/ROOT/pages/azure-storage-queue-sink.adoc +++ b/docs/modules/ROOT/pages/azure-storage-queue-sink.adoc @@ -71,4 +71,68 @@ You can run the sink using the following command: ---- kubectl apply -f azure-storage-queue-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel azure-storage-queue-sink -p "sink.accessKey=The Access Key" -p "sink.accountName=The Account Name" -p "sink.queueName=The Queue Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `azure-storage-queue-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.azure-storage-queue-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: azure-storage-queue-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: azure-storage-queue-sink + properties: + accessKey: "The Access Key" + accountName: "The Account Name" + queueName: "The Queue Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `azure-storage-queue-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f azure-storage-queue-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic azure-storage-queue-sink -p "sink.accessKey=The Access Key" -p "sink.accountName=The Account Name" -p "sink.queueName=The Queue Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/azure-storage-queue-source.adoc b/docs/modules/ROOT/pages/azure-storage-queue-source.adoc index 40d61ca4a..28b9cd2c8 100644 --- a/docs/modules/ROOT/pages/azure-storage-queue-source.adoc +++ b/docs/modules/ROOT/pages/azure-storage-queue-source.adoc @@ -64,4 +64,68 @@ You can run the source using the following command: ---- kubectl apply -f azure-storage-queue-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind azure-storage-queue-source -p "source.accessKey=The Access Key" -p "source.accountName=The Account Name" -p "source.queueName=The Queue Name" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `azure-storage-queue-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.azure-storage-queue-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: azure-storage-queue-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: azure-storage-queue-source + properties: + accessKey: "The Access Key" + accountName: "The Account Name" + queueName: "The Queue Name" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `azure-storage-queue-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f azure-storage-queue-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind azure-storage-queue-source -p "source.accessKey=The Access Key" -p "source.accountName=The Account Name" -p "source.queueName=The Queue Name" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/bitcoin-source.adoc b/docs/modules/ROOT/pages/bitcoin-source.adoc index 0fc79ce5f..3105925c3 100644 --- a/docs/modules/ROOT/pages/bitcoin-source.adoc +++ b/docs/modules/ROOT/pages/bitcoin-source.adoc @@ -57,4 +57,64 @@ You can run the source using the following command: ---- kubectl apply -f bitcoin-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind bitcoin-source channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `bitcoin-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.bitcoin-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: bitcoin-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: bitcoin-source + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Also make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to.
+
+Save the `bitcoin-source-binding.yaml` file to your hard drive, then configure it according to your needs.
+
+You can run the source using the following command:
+
+[source,shell]
+----
+kubectl apply -f bitcoin-source-binding.yaml
+----
+
+==== *Binding to Kafka using the Kamel CLI:*
+
+The procedure described above can be simplified into a single execution of the `kamel bind` command:
+
+[source,shell]
+----
+kamel bind bitcoin-source kafka.strimzi.io/v1beta1:KafkaTopic:my-topic
+----
+
+This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+
 // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT
diff --git a/docs/modules/ROOT/pages/caffeine-action.adoc b/docs/modules/ROOT/pages/caffeine-action.adoc
index 49dfea17f..b60fa984c 100644
--- a/docs/modules/ROOT/pages/caffeine-action.adoc
+++ b/docs/modules/ROOT/pages/caffeine-action.adoc
@@ -34,7 +34,7 @@ This section summarizes how the `caffeine-action` can be used in various context
 
 === Knative Action
 
-The `caffeine-action` Kamelet can be used as intermediate step in a binding.
+The `caffeine-action` Kamelet can be used as intermediate step in a Knative binding.
 
 .caffeine-action-binding.yaml
 [source,yaml]
@@ -74,4 +74,71 @@ You can run the action using the following command:
 ----
 kubectl apply -f caffeine-action-binding.yaml
 ----
+
+==== *Binding to Knative using the Kamel CLI:*
+
+The procedure described above can be simplified into a single execution of the `kamel bind` command:
+
+[source,shell]
+----
+kamel bind timer-source?message=Hello --step caffeine-action channel/mychannel
+----
+
+This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+
+=== Kafka Action
+
+The `caffeine-action` Kamelet can be used as intermediate step in a Kafka binding.
+ +.caffeine-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: caffeine-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: caffeine-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `caffeine-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f caffeine-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step caffeine-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/cassandra-sink.adoc b/docs/modules/ROOT/pages/cassandra-sink.adoc index 9c478ef9a..7e95f7838 100644 --- a/docs/modules/ROOT/pages/cassandra-sink.adoc +++ b/docs/modules/ROOT/pages/cassandra-sink.adoc @@ -72,4 +72,71 @@ You can run the sink using the following command: ---- kubectl apply -f cassandra-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel cassandra-sink -p "sink.host=localhost" -p "sink.keyspace=customers" -p "sink.password=The Password" -p sink.port=9042 -p "sink.preparedStatement=The Prepared Statement" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `cassandra-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.cassandra-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: cassandra-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: cassandra-sink + properties: + host: "localhost" + keyspace: "customers" + password: "The Password" + port: 9042 + preparedStatement: "The Prepared Statement" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `cassandra-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f cassandra-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic cassandra-sink -p "sink.host=localhost" -p "sink.keyspace=customers" -p "sink.password=The Password" -p sink.port=9042 -p "sink.preparedStatement=The Prepared Statement" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/cassandra-source.adoc b/docs/modules/ROOT/pages/cassandra-source.adoc index 4e084e107..4009f463a 100644 --- a/docs/modules/ROOT/pages/cassandra-source.adoc +++ b/docs/modules/ROOT/pages/cassandra-source.adoc @@ -71,4 +71,71 @@ You can run the source using the following command: ---- kubectl apply -f cassandra-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind cassandra-source -p "source.host=localhost" -p "source.keyspace=customers" -p "source.password=The Password" -p source.port=9042 -p "source.query=The Query" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `cassandra-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.cassandra-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: cassandra-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: cassandra-source + properties: + host: "localhost" + keyspace: "customers" + password: "The Password" + port: 9042 + query: "The Query" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `cassandra-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f cassandra-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind cassandra-source -p "source.host=localhost" -p "source.keyspace=customers" -p "source.password=The Password" -p source.port=9042 -p "source.query=The Query" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/chuck-norris-source.adoc b/docs/modules/ROOT/pages/chuck-norris-source.adoc index 83bd691e9..488ea9f8e 100644 --- a/docs/modules/ROOT/pages/chuck-norris-source.adoc +++ b/docs/modules/ROOT/pages/chuck-norris-source.adoc @@ -57,4 +57,64 @@ You can run the source using the following command: ---- kubectl apply -f chuck-norris-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind chuck-norris-source channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `chuck-norris-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.chuck-norris-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: chuck-norris-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: chuck-norris-source + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `chuck-norris-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f chuck-norris-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind chuck-norris-source kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/cron-source.adoc b/docs/modules/ROOT/pages/cron-source.adoc index 00e1e00bb..d18520eb0 100644 --- a/docs/modules/ROOT/pages/cron-source.adoc +++ b/docs/modules/ROOT/pages/cron-source.adoc @@ -61,4 +61,67 @@ You can run the source using the following command: ---- kubectl apply -f cron-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind cron-source -p "source.message=hello world" -p "source.schedule=0/3 10 * * * ?" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `cron-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.cron-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: cron-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: cron-source + properties: + message: "hello world" + schedule: "0/3 10 * * * ?" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Also make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to.
+
+Save the `cron-source-binding.yaml` file to your hard drive, then configure it according to your needs.
+
+You can run the source using the following command:
+
+[source,shell]
+----
+kubectl apply -f cron-source-binding.yaml
+----
+
+==== *Binding to Kafka using the Kamel CLI:*
+
+The procedure described above can be simplified into a single execution of the `kamel bind` command:
+
+[source,shell]
+----
+kamel bind cron-source -p "source.message=hello world" -p "source.schedule=0/3 10 * * * ?" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic
+----
+
+This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+
 // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT
diff --git a/docs/modules/ROOT/pages/dns-dig-action.adoc b/docs/modules/ROOT/pages/dns-dig-action.adoc
index 3020527b7..05d8d4538 100644
--- a/docs/modules/ROOT/pages/dns-dig-action.adoc
+++ b/docs/modules/ROOT/pages/dns-dig-action.adoc
@@ -27,7 +27,7 @@ This section summarizes how the `dns-dig-action` can be used in various contexts
 
 === Knative Action
 
-The `dns-dig-action` Kamelet can be used as intermediate step in a binding.
+The `dns-dig-action` Kamelet can be used as intermediate step in a Knative binding.
 
 .dns-dig-action-binding.yaml
 [source,yaml]
@@ -67,4 +67,71 @@ You can run the action using the following command:
 ----
 kubectl apply -f dns-dig-action-binding.yaml
 ----
+
+==== *Binding to Knative using the Kamel CLI:*
+
+The procedure described above can be simplified into a single execution of the `kamel bind` command:
+
+[source,shell]
+----
+kamel bind timer-source?message=Hello --step dns-dig-action channel/mychannel
+----
+
+This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+ +=== Kafka Action + +The `dns-dig-action` Kamelet can be used as intermediate step in a Kafka binding. + +.dns-dig-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: dns-dig-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: dns-dig-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `dns-dig-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f dns-dig-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step dns-dig-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/dns-ip-action.adoc b/docs/modules/ROOT/pages/dns-ip-action.adoc index 65f13963b..36640244c 100644 --- a/docs/modules/ROOT/pages/dns-ip-action.adoc +++ b/docs/modules/ROOT/pages/dns-ip-action.adoc @@ -23,7 +23,7 @@ This section summarizes how the `dns-ip-action` can be used in various contexts. === Knative Action -The `dns-ip-action` Kamelet can be used as intermediate step in a binding. 
+The `dns-ip-action` Kamelet can be used as intermediate step in a Knative binding. .dns-ip-action-binding.yaml [source,yaml] @@ -63,4 +63,71 @@ You can run the action using the following command: ---- kubectl apply -f dns-ip-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step dns-ip-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `dns-ip-action` Kamelet can be used as intermediate step in a Kafka binding. + +.dns-ip-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: dns-ip-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: dns-ip-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `dns-ip-action-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f dns-ip-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step dns-ip-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/dns-lookup-action.adoc b/docs/modules/ROOT/pages/dns-lookup-action.adoc index a3823866e..b55fbedd6 100644 --- a/docs/modules/ROOT/pages/dns-lookup-action.adoc +++ b/docs/modules/ROOT/pages/dns-lookup-action.adoc @@ -23,7 +23,7 @@ This section summarizes how the `dns-lookup-action` can be used in various conte === Knative Action -The `dns-lookup-action` Kamelet can be used as intermediate step in a binding. +The `dns-lookup-action` Kamelet can be used as intermediate step in a Knative binding. .dns-lookup-action-binding.yaml [source,yaml] @@ -63,4 +63,71 @@ You can run the action using the following command: ---- kubectl apply -f dns-lookup-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step dns-lookup-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `dns-lookup-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.dns-lookup-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: dns-lookup-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: dns-lookup-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `dns-lookup-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f dns-lookup-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step dns-lookup-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/dropbox-sink.adoc b/docs/modules/ROOT/pages/dropbox-sink.adoc index 0a0aa8c19..8786b3e07 100644 --- a/docs/modules/ROOT/pages/dropbox-sink.adoc +++ b/docs/modules/ROOT/pages/dropbox-sink.adoc @@ -70,4 +70,68 @@ You can run the sink using the following command: ---- kubectl apply -f dropbox-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel dropbox-sink -p "sink.accessToken=The Dropbox Access Token" -p "sink.clientIdentifier=The Client Identifier" -p "sink.remotePath=The Remote Path" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `dropbox-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.dropbox-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: dropbox-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: dropbox-sink + properties: + accessToken: "The Dropbox Access Token" + clientIdentifier: "The Client Identifier" + remotePath: "The Remote Path" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `dropbox-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f dropbox-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic dropbox-sink -p "sink.accessToken=The Dropbox Access Token" -p "sink.clientIdentifier=The Client Identifier" -p "sink.remotePath=The Remote Path" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/dropbox-source.adoc b/docs/modules/ROOT/pages/dropbox-source.adoc index 921d57a4a..1e0a21b19 100644 --- a/docs/modules/ROOT/pages/dropbox-source.adoc +++ b/docs/modules/ROOT/pages/dropbox-source.adoc @@ -66,4 +66,69 @@ You can run the source using the following command: ---- kubectl apply -f dropbox-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind dropbox-source -p "source.accessToken=The Dropbox Access Token" -p "source.clientIdentifier=The Client Identifier" -p "source.query=The Queries" -p "source.remotePath=The Remote Path" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `dropbox-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.dropbox-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: dropbox-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: dropbox-source + properties: + accessToken: "The Dropbox Access Token" + clientIdentifier: "The Client Identifier" + query: "The Queries" + remotePath: "The Remote Path" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `dropbox-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f dropbox-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind dropbox-source -p "source.accessToken=The Dropbox Access Token" -p "source.clientIdentifier=The Client Identifier" -p "source.query=The Queries" -p "source.remotePath=The Remote Path" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/earthquake-source.adoc b/docs/modules/ROOT/pages/earthquake-source.adoc index 4004e47d4..17ee3f718 100644 --- a/docs/modules/ROOT/pages/earthquake-source.adoc +++ b/docs/modules/ROOT/pages/earthquake-source.adoc @@ -58,4 +58,64 @@ You can run the source using the following command: ---- kubectl apply -f earthquake-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind earthquake-source channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `earthquake-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.earthquake-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: earthquake-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: earthquake-source + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `earthquake-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f earthquake-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind earthquake-source kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/elasticsearch-index-sink.adoc b/docs/modules/ROOT/pages/elasticsearch-index-sink.adoc index ece3b6b93..8597c30c7 100644 --- a/docs/modules/ROOT/pages/elasticsearch-index-sink.adoc +++ b/docs/modules/ROOT/pages/elasticsearch-index-sink.adoc @@ -75,4 +75,67 @@ You can run the sink using the following command: ---- kubectl apply -f elasticsearch-index-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel elasticsearch-index-sink -p "sink.clusterName=quickstart" -p "sink.hostAddresses=quickstart-es-http:9200" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `elasticsearch-index-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.elasticsearch-index-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: elasticsearch-index-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: elasticsearch-index-sink + properties: + clusterName: "quickstart" + hostAddresses: "quickstart-es-http:9200" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `elasticsearch-index-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f elasticsearch-index-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic elasticsearch-index-sink -p "sink.clusterName=quickstart" -p "sink.hostAddresses=quickstart-es-http:9200" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/elasticsearch-search-source.adoc b/docs/modules/ROOT/pages/elasticsearch-search-source.adoc index 72aa9619c..1eac213ee 100644 --- a/docs/modules/ROOT/pages/elasticsearch-search-source.adoc +++ b/docs/modules/ROOT/pages/elasticsearch-search-source.adoc @@ -69,4 +69,69 @@ You can run the source using the following command: ---- kubectl apply -f elasticsearch-search-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind elasticsearch-search-source -p "source.clusterName=The ElasticSearch Cluster Name" -p "source.hostAddresses=The Host Addresses" -p "source.indexName=The Index in ElasticSearch" -p "source.query=The Query" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `elasticsearch-search-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.elasticsearch-search-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: elasticsearch-search-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: elasticsearch-search-source + properties: + clusterName: "The ElasticSearch Cluster Name" + hostAddresses: "The Host Addresses" + indexName: "The Index in ElasticSearch" + query: "The Query" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `elasticsearch-search-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f elasticsearch-search-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind elasticsearch-search-source -p "source.clusterName=The ElasticSearch Cluster Name" -p "source.hostAddresses=The Host Addresses" -p "source.indexName=The Index in ElasticSearch" -p "source.query=The Query" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/exec-sink.adoc b/docs/modules/ROOT/pages/exec-sink.adoc index 23b47e283..db6d73b1f 100644 --- a/docs/modules/ROOT/pages/exec-sink.adoc +++ b/docs/modules/ROOT/pages/exec-sink.adoc @@ -65,4 +65,66 @@ You can run the sink using the following command: ---- kubectl apply -f exec-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel exec-sink -p "sink.executable=The Executable Command" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `exec-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.exec-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: exec-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: exec-sink + properties: + executable: "The Executable Command" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `exec-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f exec-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic exec-sink -p "sink.executable=The Executable Command" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/extract-field-action.adoc b/docs/modules/ROOT/pages/extract-field-action.adoc index e7191bbd9..65a73aec3 100644 --- a/docs/modules/ROOT/pages/extract-field-action.adoc +++ b/docs/modules/ROOT/pages/extract-field-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `extract-field-action` can be used in various co === Knative Action -The `extract-field-action` Kamelet can be used as intermediate step in a binding. +The `extract-field-action` Kamelet can be used as intermediate step in a Knative binding. 
.extract-field-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f extract-field-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step extract-field-action -p "step-0.field=The Field" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `extract-field-action` Kamelet can be used as intermediate step in a Kafka binding. + +.extract-field-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: extract-field-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: extract-field-action + properties: + field: "The Field" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `extract-field-action-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f extract-field-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step extract-field-action -p "step-0.field=The Field" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/fhir-source.adoc b/docs/modules/ROOT/pages/fhir-source.adoc index 4176d9326..4eec9bc14 100644 --- a/docs/modules/ROOT/pages/fhir-source.adoc +++ b/docs/modules/ROOT/pages/fhir-source.adoc @@ -67,4 +67,68 @@ You can run the source using the following command: ---- kubectl apply -f fhir-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind fhir-source -p "source.password=The Password" -p "source.serverUrl=The Server URL" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `fhir-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.fhir-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: fhir-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: fhir-source + properties: + password: "The Password" + serverUrl: "The Server URL" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `fhir-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f fhir-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind fhir-source -p "source.password=The Password" -p "source.serverUrl=The Server URL" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/file-watch-source.adoc b/docs/modules/ROOT/pages/file-watch-source.adoc index 1b6927231..805cabef9 100644 --- a/docs/modules/ROOT/pages/file-watch-source.adoc +++ b/docs/modules/ROOT/pages/file-watch-source.adoc @@ -60,4 +60,66 @@ You can run the source using the following command: ---- kubectl apply -f file-watch-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind file-watch-source -p "source.path=The Path to Watch" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `file-watch-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.file-watch-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: file-watch-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: file-watch-source + properties: + path: "The Path to Watch" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `file-watch-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f file-watch-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind file-watch-source -p "source.path=The Path to Watch" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/ftp-sink.adoc b/docs/modules/ROOT/pages/ftp-sink.adoc index 67ff5c8b4..2f49216e8 100644 --- a/docs/modules/ROOT/pages/ftp-sink.adoc +++ b/docs/modules/ROOT/pages/ftp-sink.adoc @@ -74,4 +74,69 @@ You can run the sink using the following command: ---- kubectl apply -f ftp-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel ftp-sink -p "sink.directoryName=The Directory Name" -p "sink.host=The Host" -p "sink.password=The Password" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `ftp-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.ftp-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: ftp-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: ftp-sink + properties: + directoryName: "The Directory Name" + host: "The Host" + password: "The Password" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `ftp-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f ftp-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic ftp-sink -p "sink.directoryName=The Directory Name" -p "sink.host=The Host" -p "sink.password=The Password" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/ftp-source.adoc b/docs/modules/ROOT/pages/ftp-source.adoc index c58f16be3..65f91cbd2 100644 --- a/docs/modules/ROOT/pages/ftp-source.adoc +++ b/docs/modules/ROOT/pages/ftp-source.adoc @@ -69,4 +69,69 @@ You can run the source using the following command: ---- kubectl apply -f ftp-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind ftp-source -p "source.directoryName=The Directory Name" -p "source.host=The Host" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `ftp-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.ftp-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: ftp-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: ftp-source + properties: + directoryName: "The Directory Name" + host: "The Host" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `ftp-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f ftp-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind ftp-source -p "source.directoryName=The Directory Name" -p "source.host=The Host" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/ftps-sink.adoc b/docs/modules/ROOT/pages/ftps-sink.adoc index 99197dcb5..11d929376 100644 --- a/docs/modules/ROOT/pages/ftps-sink.adoc +++ b/docs/modules/ROOT/pages/ftps-sink.adoc @@ -74,4 +74,69 @@ You can run the sink using the following command: ---- kubectl apply -f ftps-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel ftps-sink -p "sink.directoryName=The Directory Name" -p "sink.host=The Host" -p "sink.password=The Password" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `ftps-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.ftps-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: ftps-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: ftps-sink + properties: + directoryName: "The Directory Name" + host: "The Host" + password: "The Password" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `ftps-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f ftps-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic ftps-sink -p "sink.directoryName=The Directory Name" -p "sink.host=The Host" -p "sink.password=The Password" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/ftps-source.adoc b/docs/modules/ROOT/pages/ftps-source.adoc index 54c100ab4..d16a18c0c 100644 --- a/docs/modules/ROOT/pages/ftps-source.adoc +++ b/docs/modules/ROOT/pages/ftps-source.adoc @@ -69,4 +69,69 @@ You can run the source using the following command: ---- kubectl apply -f ftps-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind ftps-source -p "source.directoryName=The Directory Name" -p "source.host=The Host" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `ftps-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.ftps-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: ftps-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: ftps-source + properties: + directoryName: "The Directory Name" + host: "The Host" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `ftps-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f ftps-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind ftps-source -p "source.directoryName=The Directory Name" -p "source.host=The Host" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/github-source.adoc b/docs/modules/ROOT/pages/github-source.adoc index e699816be..b58ad6b0b 100644 --- a/docs/modules/ROOT/pages/github-source.adoc +++ b/docs/modules/ROOT/pages/github-source.adoc @@ -64,4 +64,68 @@ You can run the source using the following command: ---- kubectl apply -f github-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind github-source -p "source.oauthToken=The OAuth Token" -p "source.repoName=The Repository Name" -p "source.repoOwner=The Repository Owner" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `github-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.github-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: github-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: github-source + properties: + oauthToken: "The OAuth Token" + repoName: "The Repository Name" + repoOwner: "The Repository Owner" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `github-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f github-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind github-source -p "source.oauthToken=The OAuth Token" -p "source.repoName=The Repository Name" -p "source.repoOwner=The Repository Owner" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/google-calendar-source.adoc b/docs/modules/ROOT/pages/google-calendar-source.adoc index 32eb7a935..06bf00d02 100644 --- a/docs/modules/ROOT/pages/google-calendar-source.adoc +++ b/docs/modules/ROOT/pages/google-calendar-source.adoc @@ -74,4 +74,72 @@ You can run the source using the following command: ---- kubectl apply -f google-calendar-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind google-calendar-source -p "source.accessToken=The Access Token" -p "source.applicationName=The Application name" -p "source.calendarId=The Calendar ID" -p "source.clientId=The Client Id" -p "source.clientSecret=The Client Secret" -p "source.index=The Index" -p "source.refreshToken=The Refresh Token" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `google-calendar-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.google-calendar-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: google-calendar-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: google-calendar-source + properties: + accessToken: "The Access Token" + applicationName: "The Application name" + calendarId: "The Calendar ID" + clientId: "The Client Id" + clientSecret: "The Client Secret" + index: "The Index" + refreshToken: "The Refresh Token" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `google-calendar-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f google-calendar-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind google-calendar-source -p "source.accessToken=The Access Token" -p "source.applicationName=The Application name" -p "source.calendarId=The Calendar ID" -p "source.clientId=The Client Id" -p "source.clientSecret=The Client Secret" -p "source.index=The Index" -p "source.refreshToken=The Refresh Token" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/google-mail-source.adoc b/docs/modules/ROOT/pages/google-mail-source.adoc index 4d4572b42..2162b5cef 100644 --- a/docs/modules/ROOT/pages/google-mail-source.adoc +++ b/docs/modules/ROOT/pages/google-mail-source.adoc @@ -73,4 +73,71 @@ You can run the source using the following command: ---- kubectl apply -f google-mail-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind google-mail-source -p "source.accessToken=The Access Token" -p "source.applicationName=The Application name" -p "source.clientId=The Client ID" -p "source.clientSecret=The Client Secret" -p "source.index=The Index" -p "source.refreshToken=The Refresh Token" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `google-mail-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.google-mail-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: google-mail-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: google-mail-source + properties: + accessToken: "The Access Token" + applicationName: "The Application name" + clientId: "The Client ID" + clientSecret: "The Client Secret" + index: "The Index" + refreshToken: "The Refresh Token" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `google-mail-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f google-mail-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind google-mail-source -p "source.accessToken=The Access Token" -p "source.applicationName=The Application name" -p "source.clientId=The Client ID" -p "source.clientSecret=The Client Secret" -p "source.index=The Index" -p "source.refreshToken=The Refresh Token" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/google-sheets-source.adoc b/docs/modules/ROOT/pages/google-sheets-source.adoc index 5e2a80f4d..f8cee5ac1 100644 --- a/docs/modules/ROOT/pages/google-sheets-source.adoc +++ b/docs/modules/ROOT/pages/google-sheets-source.adoc @@ -74,4 +74,72 @@ You can run the source using the following command: ---- kubectl apply -f google-sheets-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind google-sheets-source -p "source.accessToken=The Access Token" -p "source.applicationName=The Application name" -p "source.clientId=The Client Id" -p "source.clientSecret=The Client Secret" -p "source.index=The Index" -p "source.refreshToken=The Refresh Token" -p "source.spreadsheetId=The Spreadsheet ID" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Source + +The `google-sheets-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.google-sheets-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: google-sheets-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: google-sheets-source + properties: + accessToken: "The Access Token" + applicationName: "The Application name" + clientId: "The Client Id" + clientSecret: "The Client Secret" + index: "The Index" + refreshToken: "The Refresh Token" + spreadsheetId: "The Spreadsheet ID" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `google-sheets-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f google-sheets-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind google-sheets-source -p "source.accessToken=The Access Token" -p "source.applicationName=The Application name" -p "source.clientId=The Client Id" -p "source.clientSecret=The Client Secret" -p "source.index=The Index" -p "source.refreshToken=The Refresh Token" -p "source.spreadsheetId=The Spreadsheet ID" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/has-header-filter-action.adoc b/docs/modules/ROOT/pages/has-header-filter-action.adoc index 5693d042e..d3beb5c06 100644 --- a/docs/modules/ROOT/pages/has-header-filter-action.adoc +++ b/docs/modules/ROOT/pages/has-header-filter-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `has-header-filter-action` can be used in variou === Knative Action -The `has-header-filter-action` Kamelet can be used as intermediate step in a binding. +The `has-header-filter-action` Kamelet can be used as intermediate step in a Knative binding. .has-header-filter-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f has-header-filter-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step has-header-filter-action -p "step-0.name=headerName" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `has-header-filter-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.has-header-filter-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: has-header-filter-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: has-header-filter-action + properties: + name: "headerName" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `has-header-filter-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f has-header-filter-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step has-header-filter-action -p "step-0.name=headerName" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/header-matches-filter-action.adoc b/docs/modules/ROOT/pages/header-matches-filter-action.adoc index 4ab3efff4..9c4b4281c 100644 --- a/docs/modules/ROOT/pages/header-matches-filter-action.adoc +++ b/docs/modules/ROOT/pages/header-matches-filter-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `header-matches-filter-action` can be used in va === Knative Action -The `header-matches-filter-action` Kamelet can be used as intermediate step in a binding. +The `header-matches-filter-action` Kamelet can be used as intermediate step in a Knative binding. .header-matches-filter-action-binding.yaml [source,yaml] @@ -67,4 +67,73 @@ You can run the action using the following command: ---- kubectl apply -f header-matches-filter-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step header-matches-filter-action -p "step-0.regex=The Regex" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `header-matches-filter-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.header-matches-filter-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: header-matches-filter-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: header-matches-filter-action + properties: + regex: "The Regex" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `header-matches-filter-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f header-matches-filter-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step header-matches-filter-action -p "step-0.regex=The Regex" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/hoist-field-action.adoc b/docs/modules/ROOT/pages/hoist-field-action.adoc index a388e8d4e..b6f5a672f 100644 --- a/docs/modules/ROOT/pages/hoist-field-action.adoc +++ b/docs/modules/ROOT/pages/hoist-field-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `hoist-field-action` can be used in various cont === Knative Action -The `hoist-field-action` Kamelet can be used as intermediate step in a binding. +The `hoist-field-action` Kamelet can be used as intermediate step in a Knative binding. .hoist-field-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f hoist-field-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step hoist-field-action -p "step-0.field=The Field" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `hoist-field-action` Kamelet can be used as intermediate step in a Kafka binding. + +.hoist-field-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: hoist-field-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: hoist-field-action + properties: + field: "The Field" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `hoist-field-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f hoist-field-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step hoist-field-action -p "step-0.field=The Field" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/http-secured-sink.adoc b/docs/modules/ROOT/pages/http-secured-sink.adoc index 2e8122672..ec6fd1465 100644 --- a/docs/modules/ROOT/pages/http-secured-sink.adoc +++ b/docs/modules/ROOT/pages/http-secured-sink.adoc @@ -64,4 +64,66 @@ You can run the sink using the following command: ---- kubectl apply -f http-secured-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel http-secured-sink -p "sink.url=https://my-service/path" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `http-secured-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.http-secured-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: http-secured-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: http-secured-sink + properties: + url: "https://my-service/path" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `http-secured-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f http-secured-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic http-secured-sink -p "sink.url=https://my-service/path" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/http-secured-source.adoc b/docs/modules/ROOT/pages/http-secured-source.adoc index aa39bb00a..5757c67ec 100644 --- a/docs/modules/ROOT/pages/http-secured-source.adoc +++ b/docs/modules/ROOT/pages/http-secured-source.adoc @@ -65,4 +65,66 @@ You can run the source using the following command: ---- kubectl apply -f http-secured-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind http-secured-source -p "source.url=https://gist.githubusercontent.com/nicolaferraro/e3c72ace3c751f9f88273896611ce5fe/raw/3b6f54060bacb56b6719b7386a4645cb59ad6cc1/quote.json" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `http-secured-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.http-secured-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: http-secured-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: http-secured-source + properties: + url: "https://gist.githubusercontent.com/nicolaferraro/e3c72ace3c751f9f88273896611ce5fe/raw/3b6f54060bacb56b6719b7386a4645cb59ad6cc1/quote.json" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `http-secured-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f http-secured-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind http-secured-source -p "source.url=https://gist.githubusercontent.com/nicolaferraro/e3c72ace3c751f9f88273896611ce5fe/raw/3b6f54060bacb56b6719b7386a4645cb59ad6cc1/quote.json" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/http-sink.adoc b/docs/modules/ROOT/pages/http-sink.adoc index 0ffd51450..8bfdafb3d 100644 --- a/docs/modules/ROOT/pages/http-sink.adoc +++ b/docs/modules/ROOT/pages/http-sink.adoc @@ -60,4 +60,66 @@ You can run the sink using the following command: ---- kubectl apply -f http-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel http-sink -p "sink.url=https://my-service/path" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `http-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.http-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: http-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: http-sink + properties: + url: "https://my-service/path" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `http-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f http-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic http-sink -p "sink.url=https://my-service/path" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/http-source.adoc b/docs/modules/ROOT/pages/http-source.adoc index 468817412..ebb1b0bcb 100644 --- a/docs/modules/ROOT/pages/http-source.adoc +++ b/docs/modules/ROOT/pages/http-source.adoc @@ -61,4 +61,66 @@ You can run the source using the following command: ---- kubectl apply -f http-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind http-source -p "source.url=https://gist.githubusercontent.com/nicolaferraro/e3c72ace3c751f9f88273896611ce5fe/raw/3b6f54060bacb56b6719b7386a4645cb59ad6cc1/quote.json" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `http-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.http-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: http-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: http-source + properties: + url: "https://gist.githubusercontent.com/nicolaferraro/e3c72ace3c751f9f88273896611ce5fe/raw/3b6f54060bacb56b6719b7386a4645cb59ad6cc1/quote.json" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `http-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f http-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind http-source -p "source.url=https://gist.githubusercontent.com/nicolaferraro/e3c72ace3c751f9f88273896611ce5fe/raw/3b6f54060bacb56b6719b7386a4645cb59ad6cc1/quote.json" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/infinispan-source.adoc b/docs/modules/ROOT/pages/infinispan-source.adoc index c0007e362..101cd7ec4 100644 --- a/docs/modules/ROOT/pages/infinispan-source.adoc +++ b/docs/modules/ROOT/pages/infinispan-source.adoc @@ -68,4 +68,68 @@ You can run the source using the following command: ---- kubectl apply -f infinispan-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind infinispan-source -p "source.hosts=The Hosts" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `infinispan-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.infinispan-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: infinispan-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: infinispan-source + properties: + hosts: "The Hosts" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `infinispan-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f infinispan-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind infinispan-source -p "source.hosts=The Hosts" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/insert-field-action.adoc b/docs/modules/ROOT/pages/insert-field-action.adoc index bfb9e28ae..1c5442852 100644 --- a/docs/modules/ROOT/pages/insert-field-action.adoc +++ b/docs/modules/ROOT/pages/insert-field-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `insert-field-action` can be used in various con === Knative Action -The `insert-field-action` Kamelet can be used as intermediate step in a binding. 
+The `insert-field-action` Kamelet can be used as intermediate step in a Knative binding. .insert-field-action-binding.yaml [source,yaml] @@ -68,4 +68,74 @@ You can run the action using the following command: ---- kubectl apply -f insert-field-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step insert-field-action -p "step-0.field=The Field" -p "step-0.value=The Value" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `insert-field-action` Kamelet can be used as intermediate step in a Kafka binding. + +.insert-field-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: insert-field-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: insert-field-action + properties: + field: "The Field" + value: "The Value" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `insert-field-action-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f insert-field-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step insert-field-action -p "step-0.field=The Field" -p "step-0.value=The Value" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/insert-header-action.adoc b/docs/modules/ROOT/pages/insert-header-action.adoc index ba395cc07..d8ed19a5d 100644 --- a/docs/modules/ROOT/pages/insert-header-action.adoc +++ b/docs/modules/ROOT/pages/insert-header-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `insert-header-action` can be used in various co === Knative Action -The `insert-header-action` Kamelet can be used as intermediate step in a binding. +The `insert-header-action` Kamelet can be used as intermediate step in a Knative binding. .insert-header-action-binding.yaml [source,yaml] @@ -68,4 +68,74 @@ You can run the action using the following command: ---- kubectl apply -f insert-header-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step insert-header-action -p "step-0.name=headername" -p "step-0.value=The Value" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `insert-header-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.insert-header-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: insert-header-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: insert-header-action + properties: + name: "headername" + value: "The Value" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `insert-header-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f insert-header-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step insert-header-action -p "step-0.name=headername" -p "step-0.value=The Value" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/is-tombstone-filter-action.adoc b/docs/modules/ROOT/pages/is-tombstone-filter-action.adoc index 2c26e302d..79b427a47 100644 --- a/docs/modules/ROOT/pages/is-tombstone-filter-action.adoc +++ b/docs/modules/ROOT/pages/is-tombstone-filter-action.adoc @@ -17,7 +17,7 @@ This section summarizes how the `is-tombstone-filter-action` can be used in vari === Knative Action -The `is-tombstone-filter-action` Kamelet can be used as intermediate step in a binding. +The `is-tombstone-filter-action` Kamelet can be used as intermediate step in a Knative binding. .is-tombstone-filter-action-binding.yaml [source,yaml] @@ -57,4 +57,71 @@ You can run the action using the following command: ---- kubectl apply -f is-tombstone-filter-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step is-tombstone-filter-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `is-tombstone-filter-action` Kamelet can be used as intermediate step in a Kafka binding. + +.is-tombstone-filter-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: is-tombstone-filter-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: is-tombstone-filter-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `is-tombstone-filter-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f is-tombstone-filter-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step is-tombstone-filter-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/jira-source.adoc b/docs/modules/ROOT/pages/jira-source.adoc index 96db9feb4..173871672 100644 --- a/docs/modules/ROOT/pages/jira-source.adoc +++ b/docs/modules/ROOT/pages/jira-source.adoc @@ -64,4 +64,68 @@ You can run the source using the following command: ---- kubectl apply -f jira-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind jira-source -p "source.jiraUrl=http://my_jira.com:8081" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `jira-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.jira-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: jira-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: jira-source + properties: + jiraUrl: "http://my_jira.com:8081" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `jira-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f jira-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind jira-source -p "source.jiraUrl=http://my_jira.com:8081" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/jms-amqp-10-sink.adoc b/docs/modules/ROOT/pages/jms-amqp-10-sink.adoc index f8d8e4153..a86bbfd4b 100644 --- a/docs/modules/ROOT/pages/jms-amqp-10-sink.adoc +++ b/docs/modules/ROOT/pages/jms-amqp-10-sink.adoc @@ -62,4 +62,67 @@ You can run the sink using the following command: ---- kubectl apply -f jms-amqp-10-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel jms-amqp-10-sink -p "sink.brokerURL=amqp://k3s-node-master.usersys.redhat.com:31616" -p "sink.destinationName=The Destination Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `jms-amqp-10-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.jms-amqp-10-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: jms-amqp-10-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: jms-amqp-10-sink + properties: + brokerURL: "amqp://k3s-node-master.usersys.redhat.com:31616" + destinationName: "The Destination Name" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `jms-amqp-10-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f jms-amqp-10-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic jms-amqp-10-sink -p "sink.brokerURL=amqp://k3s-node-master.usersys.redhat.com:31616" -p "sink.destinationName=The Destination Name" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/jms-amqp-10-source.adoc b/docs/modules/ROOT/pages/jms-amqp-10-source.adoc index 3a24e6077..11546690f 100644 --- a/docs/modules/ROOT/pages/jms-amqp-10-source.adoc +++ b/docs/modules/ROOT/pages/jms-amqp-10-source.adoc @@ -62,4 +62,67 @@ You can run the source using the following command: ---- kubectl apply -f jms-amqp-10-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind jms-amqp-10-source -p "source.brokerURL=amqp://k3s-node-master.usersys.redhat.com:31616" -p "source.destinationName=The Destination Name" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `jms-amqp-10-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.jms-amqp-10-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: jms-amqp-10-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: jms-amqp-10-source + properties: + brokerURL: "amqp://k3s-node-master.usersys.redhat.com:31616" + destinationName: "The Destination Name" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `jms-amqp-10-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f jms-amqp-10-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind jms-amqp-10-source -p "source.brokerURL=amqp://k3s-node-master.usersys.redhat.com:31616" -p "source.destinationName=The Destination Name" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/jms-apache-artemis-sink.adoc b/docs/modules/ROOT/pages/jms-apache-artemis-sink.adoc index 0cff85887..eecd505e7 100644 --- a/docs/modules/ROOT/pages/jms-apache-artemis-sink.adoc +++ b/docs/modules/ROOT/pages/jms-apache-artemis-sink.adoc @@ -62,4 +62,67 @@ You can run the sink using the following command: ---- kubectl apply -f jms-apache-artemis-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel jms-apache-artemis-sink -p "sink.brokerURL=tcp://my-host:61616" -p "sink.destinationName=person" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `jms-apache-artemis-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.jms-apache-artemis-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: jms-apache-artemis-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: jms-apache-artemis-sink + properties: + brokerURL: "tcp://my-host:61616" + destinationName: "person" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `jms-apache-artemis-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f jms-apache-artemis-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic jms-apache-artemis-sink -p "sink.brokerURL=tcp://my-host:61616" -p "sink.destinationName=person" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/jms-apache-artemis-source.adoc b/docs/modules/ROOT/pages/jms-apache-artemis-source.adoc index e1b39b8fe..b24467374 100644 --- a/docs/modules/ROOT/pages/jms-apache-artemis-source.adoc +++ b/docs/modules/ROOT/pages/jms-apache-artemis-source.adoc @@ -62,4 +62,67 @@ You can run the source using the following command: ---- kubectl apply -f jms-apache-artemis-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind jms-apache-artemis-source -p "source.brokerURL=tcp://k3s-node-master.usersys.redhat.com:31616" -p "source.destinationName=The Destination Name" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `jms-apache-artemis-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.jms-apache-artemis-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: jms-apache-artemis-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: jms-apache-artemis-source + properties: + brokerURL: "tcp://k3s-node-master.usersys.redhat.com:31616" + destinationName: "The Destination Name" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `jms-apache-artemis-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f jms-apache-artemis-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind jms-apache-artemis-source -p "source.brokerURL=tcp://k3s-node-master.usersys.redhat.com:31616" -p "source.destinationName=The Destination Name" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/json-deserialize-action.adoc b/docs/modules/ROOT/pages/json-deserialize-action.adoc index ae6ef0922..df0136377 100644 --- a/docs/modules/ROOT/pages/json-deserialize-action.adoc +++ b/docs/modules/ROOT/pages/json-deserialize-action.adoc @@ -17,7 +17,7 @@ This section summarizes how the `json-deserialize-action` can be used in various === Knative Action -The `json-deserialize-action` Kamelet can be used as intermediate step in a binding. +The `json-deserialize-action` Kamelet can be used as intermediate step in a Knative binding. .json-deserialize-action-binding.yaml [source,yaml] @@ -57,4 +57,71 @@ You can run the action using the following command: ---- kubectl apply -f json-deserialize-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step json-deserialize-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `json-deserialize-action` Kamelet can be used as intermediate step in a Kafka binding. + +.json-deserialize-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: json-deserialize-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: json-deserialize-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `json-deserialize-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f json-deserialize-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step json-deserialize-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/json-serialize-action.adoc b/docs/modules/ROOT/pages/json-serialize-action.adoc index f761d18b7..578974620 100644 --- a/docs/modules/ROOT/pages/json-serialize-action.adoc +++ b/docs/modules/ROOT/pages/json-serialize-action.adoc @@ -17,7 +17,7 @@ This section summarizes how the `json-serialize-action` can be used in various c === Knative Action -The `json-serialize-action` Kamelet can be used as intermediate step in a binding. +The `json-serialize-action` Kamelet can be used as intermediate step in a Knative binding. .json-serialize-action-binding.yaml [source,yaml] @@ -57,4 +57,71 @@ You can run the action using the following command: ---- kubectl apply -f json-serialize-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step json-serialize-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Action + +The `json-serialize-action` Kamelet can be used as intermediate step in a Kafka binding. + +.json-serialize-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: json-serialize-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: json-serialize-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `json-serialize-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f json-serialize-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step json-serialize-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/kafka-manual-commit-action.adoc b/docs/modules/ROOT/pages/kafka-manual-commit-action.adoc index 65513dad3..b39dd7465 100644 --- a/docs/modules/ROOT/pages/kafka-manual-commit-action.adoc +++ b/docs/modules/ROOT/pages/kafka-manual-commit-action.adoc @@ -17,7 +17,7 @@ This section summarizes how the `kafka-manual-commit-action` can be used in vari === Knative Action -The `kafka-manual-commit-action` Kamelet can be used as intermediate step in a binding. +The `kafka-manual-commit-action` Kamelet can be used as intermediate step in a Knative binding. .kafka-manual-commit-action-binding.yaml [source,yaml] @@ -57,4 +57,71 @@ You can run the action using the following command: ---- kubectl apply -f kafka-manual-commit-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step kafka-manual-commit-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `kafka-manual-commit-action` Kamelet can be used as intermediate step in a Kafka binding. + +.kafka-manual-commit-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: kafka-manual-commit-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: kafka-manual-commit-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `kafka-manual-commit-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f kafka-manual-commit-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step kafka-manual-commit-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/kafka-not-secured-sink.adoc b/docs/modules/ROOT/pages/kafka-not-secured-sink.adoc index c1ad22b46..82292d44a 100644 --- a/docs/modules/ROOT/pages/kafka-not-secured-sink.adoc +++ b/docs/modules/ROOT/pages/kafka-not-secured-sink.adoc @@ -69,4 +69,67 @@ You can run the sink using the following command: ---- kubectl apply -f kafka-not-secured-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel kafka-not-secured-sink -p "sink.brokers=The Brokers" -p "sink.topic=The Topic Names" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `kafka-not-secured-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.kafka-not-secured-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: kafka-not-secured-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: kafka-not-secured-sink + properties: + brokers: "The Brokers" + topic: "The Topic Names" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `kafka-not-secured-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f kafka-not-secured-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic kafka-not-secured-sink -p "sink.brokers=The Brokers" -p "sink.topic=The Topic Names" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/kafka-not-secured-source.adoc b/docs/modules/ROOT/pages/kafka-not-secured-source.adoc index 7317f22f1..71049dd52 100644 --- a/docs/modules/ROOT/pages/kafka-not-secured-source.adoc +++ b/docs/modules/ROOT/pages/kafka-not-secured-source.adoc @@ -65,4 +65,67 @@ You can run the source using the following command: ---- kubectl apply -f kafka-not-secured-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka-not-secured-source -p "source.brokers=The Brokers" -p "source.topic=The Topic Names" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `kafka-not-secured-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.kafka-not-secured-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: kafka-not-secured-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: kafka-not-secured-source + properties: + brokers: "The Brokers" + topic: "The Topic Names" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `kafka-not-secured-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f kafka-not-secured-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka-not-secured-source -p "source.brokers=The Brokers" -p "source.topic=The Topic Names" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/kafka-sink.adoc b/docs/modules/ROOT/pages/kafka-sink.adoc index a40aa5f2e..6702b19f5 100644 --- a/docs/modules/ROOT/pages/kafka-sink.adoc +++ b/docs/modules/ROOT/pages/kafka-sink.adoc @@ -75,4 +75,69 @@ You can run the sink using the following command: ---- kubectl apply -f kafka-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel kafka-sink -p "sink.brokers=The Brokers" -p "sink.password=The Password" -p "sink.topic=The Topic Names" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `kafka-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.kafka-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: kafka-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: kafka-sink + properties: + brokers: "The Brokers" + password: "The Password" + topic: "The Topic Names" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `kafka-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f kafka-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic kafka-sink -p "sink.brokers=The Brokers" -p "sink.password=The Password" -p "sink.topic=The Topic Names" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/kafka-source.adoc b/docs/modules/ROOT/pages/kafka-source.adoc index c85e436f1..3ba13ca47 100644 --- a/docs/modules/ROOT/pages/kafka-source.adoc +++ b/docs/modules/ROOT/pages/kafka-source.adoc @@ -71,4 +71,69 @@ You can run the source using the following command: ---- kubectl apply -f kafka-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka-source -p "source.brokers=The Brokers" -p "source.password=The Password" -p "source.topic=The Topic Names" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `kafka-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.kafka-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: kafka-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: kafka-source + properties: + brokers: "The Brokers" + password: "The Password" + topic: "The Topic Names" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `kafka-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f kafka-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka-source -p "source.brokers=The Brokers" -p "source.password=The Password" -p "source.topic=The Topic Names" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mail-imap-source.adoc b/docs/modules/ROOT/pages/mail-imap-source.adoc index 9d79b84cc..ac8ffbedd 100644 --- a/docs/modules/ROOT/pages/mail-imap-source.adoc +++ b/docs/modules/ROOT/pages/mail-imap-source.adoc @@ -66,4 +66,68 @@ You can run the source using the following command: ---- kubectl apply -f mail-imap-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mail-imap-source -p "source.host=imap.gmail.com" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `mail-imap-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.mail-imap-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mail-imap-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mail-imap-source + properties: + host: "imap.gmail.com" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `mail-imap-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f mail-imap-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mail-imap-source -p "source.host=imap.gmail.com" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mariadb-sink.adoc b/docs/modules/ROOT/pages/mariadb-sink.adoc index 44fb18bf9..f783b6488 100644 --- a/docs/modules/ROOT/pages/mariadb-sink.adoc +++ b/docs/modules/ROOT/pages/mariadb-sink.adoc @@ -80,4 +80,70 @@ You can run the sink using the following command: ---- kubectl apply -f mariadb-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel mariadb-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `mariadb-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.mariadb-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mariadb-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mariadb-sink + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `mariadb-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f mariadb-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic mariadb-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mariadb-source.adoc b/docs/modules/ROOT/pages/mariadb-source.adoc index 61dc5bfb1..b5ec849c0 100644 --- a/docs/modules/ROOT/pages/mariadb-source.adoc +++ b/docs/modules/ROOT/pages/mariadb-source.adoc @@ -73,4 +73,70 @@ You can run the source using the following command: ---- kubectl apply -f mariadb-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mariadb-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `mariadb-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.mariadb-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mariadb-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mariadb-source + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `mariadb-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f mariadb-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mariadb-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mask-field-action.adoc b/docs/modules/ROOT/pages/mask-field-action.adoc index 45ab59b32..a348d0ab5 100644 --- a/docs/modules/ROOT/pages/mask-field-action.adoc +++ b/docs/modules/ROOT/pages/mask-field-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `mask-field-action` can be used in various conte === Knative Action -The `mask-field-action` Kamelet can be used as intermediate step in a binding. +The `mask-field-action` Kamelet can be used as intermediate step in a Knative binding. .mask-field-action-binding.yaml [source,yaml] @@ -68,4 +68,74 @@ You can run the action using the following command: ---- kubectl apply -f mask-field-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step mask-field-action -p "step-0.fields=The Fields" -p "step-0.replacement=The Replacement" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `mask-field-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.mask-field-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mask-field-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mask-field-action + properties: + fields: "The Fields" + replacement: "The Replacement" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `mask-field-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f mask-field-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step mask-field-action -p "step-0.fields=The Fields" -p "step-0.replacement=The Replacement" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/message-timestamp-router-action.adoc b/docs/modules/ROOT/pages/message-timestamp-router-action.adoc index 21787181f..71d565ce4 100644 --- a/docs/modules/ROOT/pages/message-timestamp-router-action.adoc +++ b/docs/modules/ROOT/pages/message-timestamp-router-action.adoc @@ -27,7 +27,7 @@ This section summarizes how the `message-timestamp-router-action` can be used in === Knative Action -The `message-timestamp-router-action` Kamelet can be used as intermediate step in a binding. +The `message-timestamp-router-action` Kamelet can be used as intermediate step in a Knative binding. .message-timestamp-router-action-binding.yaml [source,yaml] @@ -69,4 +69,73 @@ You can run the action using the following command: ---- kubectl apply -f message-timestamp-router-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step message-timestamp-router-action -p "step-0.timestampKeys=The Timestamp Keys" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `message-timestamp-router-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.message-timestamp-router-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: message-timestamp-router-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: message-timestamp-router-action + properties: + timestampKeys: "The Timestamp Keys" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `message-timestamp-router-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f message-timestamp-router-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step message-timestamp-router-action -p "step-0.timestampKeys=The Timestamp Keys" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/minio-sink.adoc b/docs/modules/ROOT/pages/minio-sink.adoc index a8f00a638..8c8bedbd2 100644 --- a/docs/modules/ROOT/pages/minio-sink.adoc +++ b/docs/modules/ROOT/pages/minio-sink.adoc @@ -72,4 +72,69 @@ You can run the sink using the following command: ---- kubectl apply -f minio-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel minio-sink -p "sink.accessKey=The Access Key" -p "sink.bucketName=The Bucket Name" -p "sink.endpoint=http://localhost:9000" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `minio-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.minio-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: minio-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: minio-sink + properties: + accessKey: "The Access Key" + bucketName: "The Bucket Name" + endpoint: "http://localhost:9000" + secretKey: "The Secret Key" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `minio-sink-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f minio-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic minio-sink -p "sink.accessKey=The Access Key" -p "sink.bucketName=The Bucket Name" -p "sink.endpoint=http://localhost:9000" -p "sink.secretKey=The Secret Key" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/minio-source.adoc b/docs/modules/ROOT/pages/minio-source.adoc index 8233dfd32..a17347e0c 100644 --- a/docs/modules/ROOT/pages/minio-source.adoc +++ b/docs/modules/ROOT/pages/minio-source.adoc @@ -67,4 +67,69 @@ You can run the source using the following command: ---- kubectl apply -f minio-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind minio-source -p "source.accessKey=The Access Key" -p "source.bucketName=The Bucket Name" -p "source.endpoint=http://localhost:9000" -p "source.secretKey=The Secret Key" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `minio-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.minio-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: minio-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: minio-source + properties: + accessKey: "The Access Key" + bucketName: "The Bucket Name" + endpoint: "http://localhost:9000" + secretKey: "The Secret Key" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `minio-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f minio-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind minio-source -p "source.accessKey=The Access Key" -p "source.bucketName=The Bucket Name" -p "source.endpoint=http://localhost:9000" -p "source.secretKey=The Secret Key" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mqtt-source.adoc b/docs/modules/ROOT/pages/mqtt-source.adoc index 14f211a0b..897e3f199 100644 --- a/docs/modules/ROOT/pages/mqtt-source.adoc +++ b/docs/modules/ROOT/pages/mqtt-source.adoc @@ -62,4 +62,67 @@ You can run the source using the following command: ---- kubectl apply -f mqtt-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mqtt-source -p "source.brokerUrl=tcp://mosquitto:1883" -p "source.topic=mytopic" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `mqtt-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.mqtt-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mqtt-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mqtt-source + properties: + brokerUrl: "tcp://mosquitto:1883" + topic: "mytopic" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `mqtt-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f mqtt-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mqtt-source -p "source.brokerUrl=tcp://mosquitto:1883" -p "source.topic=mytopic" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mysql-sink.adoc b/docs/modules/ROOT/pages/mysql-sink.adoc index 1c4859eb7..a53eba804 100644 --- a/docs/modules/ROOT/pages/mysql-sink.adoc +++ b/docs/modules/ROOT/pages/mysql-sink.adoc @@ -80,4 +80,70 @@ You can run the sink using the following command: ---- kubectl apply -f mysql-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel mysql-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `mysql-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.mysql-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mysql-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mysql-sink + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `mysql-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f mysql-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic mysql-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/mysql-source.adoc b/docs/modules/ROOT/pages/mysql-source.adoc index d5587c843..b13ca7000 100644 --- a/docs/modules/ROOT/pages/mysql-source.adoc +++ b/docs/modules/ROOT/pages/mysql-source.adoc @@ -73,4 +73,70 @@ You can run the source using the following command: ---- kubectl apply -f mysql-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mysql-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `mysql-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.mysql-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: mysql-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: mysql-source + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `mysql-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f mysql-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind mysql-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/nats-sink.adoc b/docs/modules/ROOT/pages/nats-sink.adoc index ce01220dd..a790bef74 100644 --- a/docs/modules/ROOT/pages/nats-sink.adoc +++ b/docs/modules/ROOT/pages/nats-sink.adoc @@ -61,4 +61,67 @@ You can run the sink using the following command: ---- kubectl apply -f nats-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel nats-sink -p "sink.servers=The Servers" -p "sink.topic=The Topic" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `nats-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.nats-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: nats-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: nats-sink + properties: + servers: "The Servers" + topic: "The Topic" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `nats-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f nats-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic nats-sink -p "sink.servers=The Servers" -p "sink.topic=The Topic" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/nats-source.adoc b/docs/modules/ROOT/pages/nats-source.adoc index 7091ea6f4..71e796024 100644 --- a/docs/modules/ROOT/pages/nats-source.adoc +++ b/docs/modules/ROOT/pages/nats-source.adoc @@ -61,4 +61,67 @@ You can run the source using the following command: ---- kubectl apply -f nats-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind nats-source -p "source.servers=The Servers" -p "source.topic=The Topic" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `nats-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.nats-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: nats-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: nats-source + properties: + servers: "The Servers" + topic: "The Topic" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `nats-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f nats-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind nats-source -p "source.servers=The Servers" -p "source.topic=The Topic" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/openai-classification-action.adoc b/docs/modules/ROOT/pages/openai-classification-action.adoc index bcd93e984..15d0f1eb8 100644 --- a/docs/modules/ROOT/pages/openai-classification-action.adoc +++ b/docs/modules/ROOT/pages/openai-classification-action.adoc @@ -37,7 +37,7 @@ This section summarizes how the `openai-classification-action` can be used in va === Knative Action -The `openai-classification-action` Kamelet can be used as intermediate step in a binding. +The `openai-classification-action` Kamelet can be used as intermediate step in a Knative binding. .openai-classification-action-binding.yaml [source,yaml] @@ -79,4 +79,73 @@ You can run the action using the following command: ---- kubectl apply -f openai-classification-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step openai-classification-action -p "step-0.authorizationToken=The Authorization Token" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `openai-classification-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.openai-classification-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: openai-classification-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: openai-classification-action + properties: + authorizationToken: "The Authorization Token" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `openai-classification-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f openai-classification-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step openai-classification-action -p "step-0.authorizationToken=The Authorization Token" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/openai-completion-action.adoc b/docs/modules/ROOT/pages/openai-completion-action.adoc index d3f1623b9..c9af81615 100644 --- a/docs/modules/ROOT/pages/openai-completion-action.adoc +++ b/docs/modules/ROOT/pages/openai-completion-action.adoc @@ -34,7 +34,7 @@ This section summarizes how the `openai-completion-action` can be used in variou === Knative Action -The `openai-completion-action` Kamelet can be used as intermediate step in a binding. +The `openai-completion-action` Kamelet can be used as intermediate step in a Knative binding. .openai-completion-action-binding.yaml [source,yaml] @@ -76,4 +76,73 @@ You can run the action using the following command: ---- kubectl apply -f openai-completion-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step openai-completion-action -p "step-0.authorizationToken=The Authorization Token" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `openai-completion-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.openai-completion-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: openai-completion-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: openai-completion-action + properties: + authorizationToken: "The Authorization Token" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `openai-completion-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f openai-completion-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step openai-completion-action -p "step-0.authorizationToken=The Authorization Token" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/pdf-action.adoc b/docs/modules/ROOT/pages/pdf-action.adoc index 9a1edef9e..a9a329008 100644 --- a/docs/modules/ROOT/pages/pdf-action.adoc +++ b/docs/modules/ROOT/pages/pdf-action.adoc @@ -26,7 +26,7 @@ This section summarizes how the `pdf-action` can be used in various contexts. 
=== Knative Action -The `pdf-action` Kamelet can be used as intermediate step in a binding. +The `pdf-action` Kamelet can be used as intermediate step in a Knative binding. .pdf-action-binding.yaml [source,yaml] @@ -66,4 +66,71 @@ You can run the action using the following command: ---- kubectl apply -f pdf-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step pdf-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `pdf-action` Kamelet can be used as intermediate step in a Kafka binding. + +.pdf-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: pdf-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: pdf-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `pdf-action-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f pdf-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step pdf-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/postgresql-sink.adoc b/docs/modules/ROOT/pages/postgresql-sink.adoc index 8633aec36..ac34c83e4 100644 --- a/docs/modules/ROOT/pages/postgresql-sink.adoc +++ b/docs/modules/ROOT/pages/postgresql-sink.adoc @@ -76,4 +76,70 @@ You can run the sink using the following command: ---- kubectl apply -f postgresql-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel postgresql-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `postgresql-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.postgresql-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: postgresql-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: postgresql-sink + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `postgresql-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f postgresql-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic postgresql-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/postgresql-source.adoc b/docs/modules/ROOT/pages/postgresql-source.adoc index 6839c0aec..d1a3da61e 100644 --- a/docs/modules/ROOT/pages/postgresql-source.adoc +++ b/docs/modules/ROOT/pages/postgresql-source.adoc @@ -69,4 +69,70 @@ You can run the source using the following command: ---- kubectl apply -f postgresql-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind postgresql-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `postgresql-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.postgresql-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: postgresql-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: postgresql-source + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `postgresql-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f postgresql-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind postgresql-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/predicate-filter-action.adoc b/docs/modules/ROOT/pages/predicate-filter-action.adoc index 9095c5b85..42ef01eb0 100644 --- a/docs/modules/ROOT/pages/predicate-filter-action.adoc +++ b/docs/modules/ROOT/pages/predicate-filter-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `predicate-filter-action` can be used in various === Knative Action -The `predicate-filter-action` Kamelet can be used as intermediate step in a binding. +The `predicate-filter-action` Kamelet can be used as intermediate step in a Knative binding. 
.predicate-filter-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f predicate-filter-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step predicate-filter-action -p "step-0.expression=@.foo =~ /.*John/" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `predicate-filter-action` Kamelet can be used as intermediate step in a Kafka binding. + +.predicate-filter-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: predicate-filter-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: predicate-filter-action + properties: + expression: "@.foo =~ /.*John/" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `predicate-filter-action-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f predicate-filter-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step predicate-filter-action -p "step-0.expression=@.foo =~ /.*John/" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/protobuf-deserialize-action.adoc b/docs/modules/ROOT/pages/protobuf-deserialize-action.adoc index 244ee292d..0ef4fc0b4 100644 --- a/docs/modules/ROOT/pages/protobuf-deserialize-action.adoc +++ b/docs/modules/ROOT/pages/protobuf-deserialize-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `protobuf-deserialize-action` can be used in var === Knative Action -The `protobuf-deserialize-action` Kamelet can be used as intermediate step in a binding. +The `protobuf-deserialize-action` Kamelet can be used as intermediate step in a Knative binding. .protobuf-deserialize-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f protobuf-deserialize-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step protobuf-deserialize-action -p "step-0.schema=message Person { required string first = 1; required string last = 2; }" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Action + +The `protobuf-deserialize-action` Kamelet can be used as intermediate step in a Kafka binding. + +.protobuf-deserialize-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: protobuf-deserialize-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: protobuf-deserialize-action + properties: + schema: "message Person { required string first = 1; required string last = 2; }" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `protobuf-deserialize-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f protobuf-deserialize-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step protobuf-deserialize-action -p "step-0.schema=message Person { required string first = 1; required string last = 2; }" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/protobuf-serialize-action.adoc b/docs/modules/ROOT/pages/protobuf-serialize-action.adoc index b8d59f692..fca537e94 100644 --- a/docs/modules/ROOT/pages/protobuf-serialize-action.adoc +++ b/docs/modules/ROOT/pages/protobuf-serialize-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `protobuf-serialize-action` can be used in vario === Knative Action -The `protobuf-serialize-action` Kamelet can be used as intermediate step in a binding. +The `protobuf-serialize-action` Kamelet can be used as intermediate step in a Knative binding. .protobuf-serialize-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f protobuf-serialize-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step protobuf-serialize-action -p "step-0.schema=message Person { required string first = 1; required string last = 2; }" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `protobuf-serialize-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.protobuf-serialize-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: protobuf-serialize-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: protobuf-serialize-action + properties: + schema: "message Person { required string first = 1; required string last = 2; }" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `protobuf-serialize-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f protobuf-serialize-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step protobuf-serialize-action -p "step-0.schema=message Person { required string first = 1; required string last = 2; }" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/rabbitmq-source.adoc b/docs/modules/ROOT/pages/rabbitmq-source.adoc index b71eef711..7183ac795 100644 --- a/docs/modules/ROOT/pages/rabbitmq-source.adoc +++ b/docs/modules/ROOT/pages/rabbitmq-source.adoc @@ -66,4 +66,69 @@ You can run the source using the following command: ---- kubectl apply -f rabbitmq-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind rabbitmq-source -p "source.addresses=The Addresses" -p "source.exchangeName=The Exchange name" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `rabbitmq-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.rabbitmq-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: rabbitmq-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: rabbitmq-source + properties: + addresses: "The Addresses" + exchangeName: "The Exchange name" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `rabbitmq-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f rabbitmq-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind rabbitmq-source -p "source.addresses=The Addresses" -p "source.exchangeName=The Exchange name" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/regex-router-action.adoc b/docs/modules/ROOT/pages/regex-router-action.adoc index 4f0d78cc1..6548df078 100644 --- a/docs/modules/ROOT/pages/regex-router-action.adoc +++ b/docs/modules/ROOT/pages/regex-router-action.adoc @@ -25,7 +25,7 @@ This section summarizes how the `regex-router-action` can be used in various con === Knative Action -The `regex-router-action` Kamelet can be used as intermediate step in a binding. +The `regex-router-action` Kamelet can be used as intermediate step in a Knative binding. .regex-router-action-binding.yaml [source,yaml] @@ -68,4 +68,74 @@ You can run the action using the following command: ---- kubectl apply -f regex-router-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step regex-router-action -p "step-0.regex=The Regex" -p "step-0.replacement=The Replacement" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `regex-router-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.regex-router-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: regex-router-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: regex-router-action + properties: + regex: "The Regex" + replacement: "The Replacement" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `regex-router-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f regex-router-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step regex-router-action -p "step-0.regex=The Regex" -p "step-0.replacement=The Replacement" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/replace-field-action.adoc b/docs/modules/ROOT/pages/replace-field-action.adoc index 2db7339cb..be08e1c78 100644 --- a/docs/modules/ROOT/pages/replace-field-action.adoc +++ b/docs/modules/ROOT/pages/replace-field-action.adoc @@ -26,7 +26,7 @@ This section summarizes how the `replace-field-action` can be used in various co === Knative Action -The `replace-field-action` Kamelet can be used as intermediate step in a binding. +The `replace-field-action` Kamelet can be used as intermediate step in a Knative binding. .replace-field-action-binding.yaml [source,yaml] @@ -70,4 +70,75 @@ You can run the action using the following command: ---- kubectl apply -f replace-field-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step replace-field-action -p "step-0.disabled=The Disabled" -p "step-0.enabled=The Enabled" -p "step-0.renames=foo:bar,c1:c2" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `replace-field-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.replace-field-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: replace-field-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: replace-field-action + properties: + disabled: "The Disabled" + enabled: "The Enabled" + renames: "foo:bar,c1:c2" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `replace-field-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f replace-field-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step replace-field-action -p "step-0.disabled=The Disabled" -p "step-0.enabled=The Enabled" -p "step-0.renames=foo:bar,c1:c2" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/salesforce-source.adoc b/docs/modules/ROOT/pages/salesforce-source.adoc index a825b827c..5ef886446 100644 --- a/docs/modules/ROOT/pages/salesforce-source.adoc +++ b/docs/modules/ROOT/pages/salesforce-source.adoc @@ -70,4 +70,71 @@ You can run the source using the following command: ---- kubectl apply -f salesforce-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind salesforce-source -p "source.clientId=The Consumer Key" -p "source.clientSecret=The Consumer Secret" -p "source.password=The Password" -p "source.query=SELECT Id, Name, Email, Phone FROM Contact" -p "source.topicName=ContactTopic" -p "source.userName=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `salesforce-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.salesforce-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: salesforce-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: salesforce-source + properties: + clientId: "The Consumer Key" + clientSecret: "The Consumer Secret" + password: "The Password" + query: "SELECT Id, Name, Email, Phone FROM Contact" + topicName: "ContactTopic" + userName: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `salesforce-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f salesforce-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind salesforce-source -p "source.clientId=The Consumer Key" -p "source.clientSecret=The Consumer Secret" -p "source.password=The Password" -p "source.query=SELECT Id, Name, Email, Phone FROM Contact" -p "source.topicName=ContactTopic" -p "source.userName=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/sftp-sink.adoc b/docs/modules/ROOT/pages/sftp-sink.adoc index 80428f2ce..57ac113b2 100644 --- a/docs/modules/ROOT/pages/sftp-sink.adoc +++ b/docs/modules/ROOT/pages/sftp-sink.adoc @@ -74,4 +74,69 @@ You can run the sink using the following command: ---- kubectl apply -f sftp-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel sftp-sink -p "sink.directoryName=The Directory Name" -p "sink.host=The Host" -p "sink.password=The Password" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `sftp-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.sftp-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: sftp-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: sftp-sink + properties: + directoryName: "The Directory Name" + host: "The Host" + password: "The Password" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `sftp-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f sftp-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic sftp-sink -p "sink.directoryName=The Directory Name" -p "sink.host=The Host" -p "sink.password=The Password" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/sftp-source.adoc b/docs/modules/ROOT/pages/sftp-source.adoc index 1d72d8aa3..6ab94eb68 100644 --- a/docs/modules/ROOT/pages/sftp-source.adoc +++ b/docs/modules/ROOT/pages/sftp-source.adoc @@ -69,4 +69,69 @@ You can run the source using the following command: ---- kubectl apply -f sftp-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind sftp-source -p "source.directoryName=The Directory Name" -p "source.host=The Host" -p "source.password=The Password" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `sftp-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.sftp-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: sftp-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: sftp-source + properties: + directoryName: "The Directory Name" + host: "The Host" + password: "The Password" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `sftp-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f sftp-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind sftp-source -p "source.directoryName=The Directory Name" -p "source.host=The Host" -p "source.password=The Password" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/slack-source.adoc b/docs/modules/ROOT/pages/slack-source.adoc index 36ba70a83..f3b9d982a 100644 --- a/docs/modules/ROOT/pages/slack-source.adoc +++ b/docs/modules/ROOT/pages/slack-source.adoc @@ -61,4 +61,67 @@ You can run the source using the following command: ---- kubectl apply -f slack-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind slack-source -p "source.channel=#myroom" -p "source.token=The Token" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `slack-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.slack-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: slack-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: slack-source + properties: + channel: "#myroom" + token: "The Token" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Also make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] in the Kubernetes cluster you're connected to. + +Save the `slack-source-binding.yaml` file to your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f slack-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind slack-source -p "source.channel=#myroom" -p "source.token=The Token" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/sqlserver-sink.adoc b/docs/modules/ROOT/pages/sqlserver-sink.adoc index 042d1f0f0..3701bd835 100644 --- a/docs/modules/ROOT/pages/sqlserver-sink.adoc +++ b/docs/modules/ROOT/pages/sqlserver-sink.adoc @@ -80,4 +80,70 @@ You can run the sink using the following command: ---- kubectl apply -f sqlserver-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel sqlserver-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `sqlserver-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. + +.sqlserver-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: sqlserver-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: sqlserver-sink + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `sqlserver-sink-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f sqlserver-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic sqlserver-sink -p "sink.databaseName=The Database Name" -p "sink.password=The Password" -p "sink.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "sink.serverName=localhost" -p "sink.username=The Username" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/sqlserver-source.adoc b/docs/modules/ROOT/pages/sqlserver-source.adoc index a1d5c7a7f..1227d8d2b 100644 --- a/docs/modules/ROOT/pages/sqlserver-source.adoc +++ b/docs/modules/ROOT/pages/sqlserver-source.adoc @@ -73,4 +73,70 @@ You can run the source using the following command: ---- kubectl apply -f sqlserver-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind sqlserver-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `sqlserver-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. 
+ +.sqlserver-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: sqlserver-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: sqlserver-source + properties: + databaseName: "The Database Name" + password: "The Password" + query: "INSERT INTO accounts (username,city) VALUES (:#username,:#city)" + serverName: "localhost" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `sqlserver-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f sqlserver-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind sqlserver-source -p "source.databaseName=The Database Name" -p "source.password=The Password" -p "source.query=INSERT INTO accounts (username,city) VALUES (:#username,:#city)" -p "source.serverName=localhost" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/ssh-source.adoc b/docs/modules/ROOT/pages/ssh-source.adoc index ebebf8d77..1385aa34b 100644 --- a/docs/modules/ROOT/pages/ssh-source.adoc +++ b/docs/modules/ROOT/pages/ssh-source.adoc @@ -67,4 +67,69 @@ You can run the source using the following command: ---- kubectl apply -f ssh-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind ssh-source -p "source.host=The Host" -p "source.password=The Password" -p "source.pollCommand=date" -p "source.username=The Username" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `ssh-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.ssh-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: ssh-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: ssh-source + properties: + host: "The Host" + password: "The Password" + pollCommand: "date" + username: "The Username" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `ssh-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f ssh-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind ssh-source -p "source.host=The Host" -p "source.password=The Password" -p "source.pollCommand=date" -p "source.username=The Username" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/telegram-sink.adoc b/docs/modules/ROOT/pages/telegram-sink.adoc index f4cec4f9c..d2371f5ca 100644 --- a/docs/modules/ROOT/pages/telegram-sink.adoc +++ b/docs/modules/ROOT/pages/telegram-sink.adoc @@ -73,4 +73,66 @@ You can run the sink using the following command: ---- kubectl apply -f telegram-sink-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind channel/mychannel telegram-sink -p "sink.authorizationToken=The Token" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Sink + +The `telegram-sink` Kamelet can be used as Kafka sink by binding it to a Kafka topic. 
+ +.telegram-sink-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: telegram-sink-binding +spec: + source: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: telegram-sink + properties: + authorizationToken: "The Token" + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Also make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] in the Kubernetes cluster you're connected to. + +Save the `telegram-sink-binding.yaml` file to your hard drive, then configure it according to your needs. + +You can run the sink using the following command: + +[source,shell] +---- +kubectl apply -f telegram-sink-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind kafka.strimzi.io/v1beta1:KafkaTopic:my-topic telegram-sink -p "sink.authorizationToken=The Token" +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/telegram-source.adoc b/docs/modules/ROOT/pages/telegram-source.adoc index b5e832749..316b2fca9 100644 --- a/docs/modules/ROOT/pages/telegram-source.adoc +++ b/docs/modules/ROOT/pages/telegram-source.adoc @@ -61,4 +61,66 @@ You can run the source using the following command: ---- kubectl apply -f telegram-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind telegram-source -p "source.authorizationToken=The Token" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `telegram-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.telegram-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: telegram-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: telegram-source + properties: + authorizationToken: "The Token" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `telegram-source-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f telegram-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind telegram-source -p "source.authorizationToken=The Token" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/timer-source.adoc b/docs/modules/ROOT/pages/timer-source.adoc index 6f942d062..f84f3c66e 100644 --- a/docs/modules/ROOT/pages/timer-source.adoc +++ b/docs/modules/ROOT/pages/timer-source.adoc @@ -61,4 +61,66 @@ You can run the source using the following command: ---- kubectl apply -f timer-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source -p "source.message=hello world" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `timer-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.timer-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: timer-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "hello world" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `timer-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f timer-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source -p "source.message=hello world" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/timestamp-router-action.adoc b/docs/modules/ROOT/pages/timestamp-router-action.adoc index 0a4a86b37..ec795c4d8 100644 --- a/docs/modules/ROOT/pages/timestamp-router-action.adoc +++ b/docs/modules/ROOT/pages/timestamp-router-action.adoc @@ -26,7 +26,7 @@ This section summarizes how the `timestamp-router-action` can be used in various === Knative Action -The `timestamp-router-action` Kamelet can be used as intermediate step in a binding. +The `timestamp-router-action` Kamelet can be used as intermediate step in a Knative binding. .timestamp-router-action-binding.yaml [source,yaml] @@ -66,4 +66,71 @@ You can run the action using the following command: ---- kubectl apply -f timestamp-router-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step timestamp-router-action channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Action + +The `timestamp-router-action` Kamelet can be used as intermediate step in a Kafka binding. + +.timestamp-router-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: timestamp-router-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timestamp-router-action + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `timestamp-router-action-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f timestamp-router-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step timestamp-router-action kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/topic-name-matches-filter-action.adoc b/docs/modules/ROOT/pages/topic-name-matches-filter-action.adoc index ec9b6da90..34277d998 100644 --- a/docs/modules/ROOT/pages/topic-name-matches-filter-action.adoc +++ b/docs/modules/ROOT/pages/topic-name-matches-filter-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `topic-name-matches-filter-action` can be used i === Knative Action -The `topic-name-matches-filter-action` Kamelet can be used as intermediate step in a binding. +The `topic-name-matches-filter-action` Kamelet can be used as intermediate step in a Knative binding. .topic-name-matches-filter-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f topic-name-matches-filter-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step topic-name-matches-filter-action -p "step-0.regex=The Regex" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `topic-name-matches-filter-action` Kamelet can be used as intermediate step in a Kafka binding. 
+ +.topic-name-matches-filter-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: topic-name-matches-filter-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: topic-name-matches-filter-action + properties: + regex: "The Regex" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Also make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] in the Kubernetes cluster you're connected to. + +Save the `topic-name-matches-filter-action-binding.yaml` file to your hard drive, then configure it according to your needs. + +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f topic-name-matches-filter-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step topic-name-matches-filter-action -p "step-0.regex=The Regex" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster.
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/twitter-directmessage-source.adoc b/docs/modules/ROOT/pages/twitter-directmessage-source.adoc index eb2609e68..5c4bf1f85 100644 --- a/docs/modules/ROOT/pages/twitter-directmessage-source.adoc +++ b/docs/modules/ROOT/pages/twitter-directmessage-source.adoc @@ -70,4 +70,70 @@ You can run the source using the following command: ---- kubectl apply -f twitter-directmessage-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind twitter-directmessage-source -p "source.accessToken=The Access Token" -p "source.accessTokenSecret=The Access Token Secret" -p "source.apiKey=The API Key" -p "source.apiKeySecret=The API Key Secret" -p "source.user=ApacheCamel" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `twitter-directmessage-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.twitter-directmessage-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: twitter-directmessage-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: twitter-directmessage-source + properties: + accessToken: "The Access Token" + accessTokenSecret: "The Access Token Secret" + apiKey: "The API Key" + apiKeySecret: "The API Key Secret" + user: "ApacheCamel" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `twitter-directmessage-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f twitter-directmessage-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind twitter-directmessage-source -p "source.accessToken=The Access Token" -p "source.accessTokenSecret=The Access Token Secret" -p "source.apiKey=The API Key" -p "source.apiKeySecret=The API Key Secret" -p "source.user=ApacheCamel" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/twitter-search-source.adoc b/docs/modules/ROOT/pages/twitter-search-source.adoc index 176c5c03e..967f08868 100644 --- a/docs/modules/ROOT/pages/twitter-search-source.adoc +++ b/docs/modules/ROOT/pages/twitter-search-source.adoc @@ -70,4 +70,70 @@ You can run the source using the following command: ---- kubectl apply -f twitter-search-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind twitter-search-source -p "source.accessToken=The Access Token" -p "source.accessTokenSecret=The Access Token Secret" -p "source.apiKey=The API Key" -p "source.apiKeySecret=The API Key Secret" -p "source.keywords=Apache Camel" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ +=== Kafka Source + +The `twitter-search-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.twitter-search-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: twitter-search-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: twitter-search-source + properties: + accessToken: "The Access Token" + accessTokenSecret: "The Access Token Secret" + apiKey: "The API Key" + apiKeySecret: "The API Key Secret" + keywords: "Apache Camel" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `twitter-search-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f twitter-search-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind twitter-search-source -p "source.accessToken=The Access Token" -p "source.accessTokenSecret=The Access Token Secret" -p "source.apiKey=The API Key" -p "source.apiKeySecret=The API Key Secret" -p "source.keywords=Apache Camel" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. 
+ // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/twitter-timeline-source.adoc b/docs/modules/ROOT/pages/twitter-timeline-source.adoc index 9f10db37d..76e10378a 100644 --- a/docs/modules/ROOT/pages/twitter-timeline-source.adoc +++ b/docs/modules/ROOT/pages/twitter-timeline-source.adoc @@ -70,4 +70,70 @@ You can run the source using the following command: ---- kubectl apply -f twitter-timeline-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind twitter-timeline-source -p "source.accessToken=The Access Token" -p "source.accessTokenSecret=The Access Token Secret" -p "source.apiKey=The API Key" -p "source.apiKeySecret=The API Key Secret" -p "source.user=ApacheCamel" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `twitter-timeline-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.twitter-timeline-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: twitter-timeline-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: twitter-timeline-source + properties: + accessToken: "The Access Token" + accessTokenSecret: "The Access Token Secret" + apiKey: "The API Key" + apiKeySecret: "The API Key Secret" + user: "ApacheCamel" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. 
+ +Save the `twitter-timeline-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f twitter-timeline-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind twitter-timeline-source -p "source.accessToken=The Access Token" -p "source.accessTokenSecret=The Access Token Secret" -p "source.apiKey=The API Key" -p "source.apiKeySecret=The API Key Secret" -p "source.user=ApacheCamel" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/value-to-key-action.adoc b/docs/modules/ROOT/pages/value-to-key-action.adoc index 917697a94..744bd6703 100644 --- a/docs/modules/ROOT/pages/value-to-key-action.adoc +++ b/docs/modules/ROOT/pages/value-to-key-action.adoc @@ -24,7 +24,7 @@ This section summarizes how the `value-to-key-action` can be used in various con === Knative Action -The `value-to-key-action` Kamelet can be used as intermediate step in a binding. +The `value-to-key-action` Kamelet can be used as intermediate step in a Knative binding. 
.value-to-key-action-binding.yaml [source,yaml] @@ -66,4 +66,73 @@ You can run the action using the following command: ---- kubectl apply -f value-to-key-action-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step value-to-key-action -p "step-0.fields=The Fields" channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Action + +The `value-to-key-action` Kamelet can be used as intermediate step in a Kafka binding. + +.value-to-key-action-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: value-to-key-action-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + message: "Hello" + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: value-to-key-action + properties: + fields: "The Fields" + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. +Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `value-to-key-action-binding.yaml` file into your hard drive, then configure it according to your needs. 
+ +You can run the action using the following command: + +[source,shell] +---- +kubectl apply -f value-to-key-action-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind timer-source?message=Hello --step value-to-key-action -p "step-0.fields=The Fields" kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/docs/modules/ROOT/pages/webhook-source.adoc b/docs/modules/ROOT/pages/webhook-source.adoc index 49169dc02..33c643c4a 100644 --- a/docs/modules/ROOT/pages/webhook-source.adoc +++ b/docs/modules/ROOT/pages/webhook-source.adoc @@ -63,4 +63,64 @@ You can run the source using the following command: ---- kubectl apply -f webhook-source-binding.yaml ---- + +==== *Binding to Knative using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind webhook-source channel/mychannel +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + +=== Kafka Source + +The `webhook-source` Kamelet can be used as Kafka source by binding it to a Kafka topic. + +.webhook-source-binding.yaml +[source,yaml] +---- +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: webhook-source-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: webhook-source + sink: + ref: + kind: KafkaTopic + apiVersion: kafka.strimzi.io/v1beta1 + name: my-topic + +---- + +Ensure that you've installed https://strimzi.io/[Strimzi] and created a topic named `my-topic` in the current namespace. 
+Make also sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to. + +Save the `webhook-source-binding.yaml` file into your hard drive, then configure it according to your needs. + +You can run the source using the following command: + +[source,shell] +---- +kubectl apply -f webhook-source-binding.yaml +---- + +==== *Binding to Kafka using the Kamel CLI:* + +The procedure described above can be simplified into a single execution of the `kamel bind` command: + +[source,shell] +---- +kamel bind webhook-source kafka.strimzi.io/v1beta1:KafkaTopic:my-topic +---- + +This will create the KameletBinding under the hood and apply it to the current namespace in the cluster. + // THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT diff --git a/script/generator/generator.go b/script/generator/generator.go index b2953f373..f857544af 100644 --- a/script/generator/generator.go +++ b/script/generator/generator.go @@ -1,15 +1,20 @@ package main import ( + "bytes" "encoding/base64" + "errors" "fmt" "io/ioutil" "os" + "path" "path/filepath" "sort" "strings" + "text/template" camel "github.com/apache/camel-k/pkg/apis/camel/v1alpha1" + "github.com/iancoleman/strcase" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/runtime/serializer" @@ -25,12 +30,27 @@ func main() { dir := os.Args[1] out := os.Args[2] + funcMap := template.FuncMap{ + "ToCamel": strcase.ToCamel, + } + + templateFile := path.Join(out, "kamelet.adoc.tmpl") + t, err := template.New("kamelet.adoc.tmpl").Funcs(funcMap).ParseFiles(templateFile) + handleGeneralError(fmt.Sprintf("cannot load template file from %s", templateFile), err) + kamelets := listKamelets(dir) links := make([]string, 0) for _, k := range kamelets { img := saveImage(k, out) - produceDoc(k, out, img) + + ctx := NewTemplateContext(k, img) + + buffer := new(bytes.Buffer) + err = t.Execute(buffer, &ctx) + handleGeneralError("cannot 
process template", err) + + produceDocFile(k, out, buffer.String()) links = append(links, fmt.Sprintf("* xref:ROOT:%s.adoc[%s %s]", k.Name, img, k.Spec.Definition.Title)) } @@ -38,154 +58,122 @@ func main() { saveNav(links, out) } -func saveNav(links []string, out string) { - content := "// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT\n" - for _, l := range links { - content += l + "\n" - } - content += "// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT\n" - dest := filepath.Join(out, "nav.adoc") - if _, err := os.Stat(dest); err == nil { - err = os.Remove(dest) - handleGeneralError(fmt.Sprintf("cannot remove file %q", dest), err) - } - err := ioutil.WriteFile(dest, []byte(content), 0666) - handleGeneralError(fmt.Sprintf("cannot write file %q", dest), err) - fmt.Printf("%q written\n", dest) +type TemplateContext struct { + Kamelet camel.Kamelet + Image string } -func saveImage(k camel.Kamelet, out string) string { - if ic, ok := k.ObjectMeta.Annotations["camel.apache.org/kamelet.icon"]; ok { - svgb64Prefix := "data:image/svg+xml;base64," - if strings.HasPrefix(ic, svgb64Prefix) { - data := ic[len(svgb64Prefix):] - decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(data)) - iconContent, err := ioutil.ReadAll(decoder) - handleGeneralError(fmt.Sprintf("cannot decode icon from Kamelet %s", k.Name), err) - dest := filepath.Join(out, "assets", "images", "kamelets", fmt.Sprintf("%s.svg", k.Name)) - if _, err := os.Stat(dest); err == nil { - err = os.Remove(dest) - handleGeneralError(fmt.Sprintf("cannot remove file %q", dest), err) - } - err = ioutil.WriteFile(dest, iconContent, 0666) - handleGeneralError(fmt.Sprintf("cannot write file %q", dest), err) - fmt.Printf("%q written\n", dest) - return fmt.Sprintf("image:kamelets/%s.svg[]", k.Name) - } +func NewTemplateContext(kamelet camel.Kamelet, image string) TemplateContext { + return TemplateContext{ + Kamelet: kamelet, + Image: image, } - return "" } -func produceDoc(k camel.Kamelet, out string, 
image string) { - docFile := filepath.Join(out, "pages", k.Name+".adoc") +type Prop struct { + Name string + Title string + Required bool + Default *string + Example *string +} - content := "// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT\n" - content += "= " + image + " " + k.Spec.Definition.Title + "\n" - content += "\n" - if prov, ok := k.Annotations["camel.apache.org/provider"]; ok { - content += fmt.Sprintf("*Provided by: %q*\n", prov) - content += "\n" +func (p Prop) GetSampleValue() string { + if p.Default != nil { + return *p.Default } - if supportLevel, ok := k.Annotations["camel.apache.org/kamelet.support.level"]; ok { - content += fmt.Sprintf("*Support Level for this Kamelet is: %q*\n", supportLevel) - content += "\n" + if p.Example != nil { + return *p.Example } - content += k.Spec.Definition.Description + "\n" - content += "\n" - content += "== Configuration Options\n" - content += "\n" + return fmt.Sprintf(`"The %s"`, p.Title) +} +func getSortedProps(k camel.Kamelet) []Prop { required := make(map[string]bool) - keys := make([]string, 0, len(k.Spec.Definition.Properties)) - - if len(k.Spec.Definition.Properties) > 0 { - for _, r := range k.Spec.Definition.Required { - required[r] = true + props := make([]Prop, 0, len(k.Spec.Definition.Properties)) + for _, r := range k.Spec.Definition.Required { + required[r] = true + } + for key := range k.Spec.Definition.Properties { + prop := k.Spec.Definition.Properties[key] + var def *string + if prop.Default != nil { + b, err := prop.Default.MarshalJSON() + handleGeneralError(fmt.Sprintf("cannot marshal property %q default value in Kamelet %s", key, k.Name), err) + defVal := string(b) + def = &defVal } - - for key := range k.Spec.Definition.Properties { - keys = append(keys, key) + var ex *string + if prop.Example != nil { + b, err := prop.Example.MarshalJSON() + handleGeneralError(fmt.Sprintf("cannot marshal property %q example value in Kamelet %s", key, k.Name), err) + exVal := string(b) + ex = &exVal } - 
sort.Slice(keys, func(i, j int) bool { - ri := required[keys[i]] - rj := required[keys[j]] - if ri && !rj { - return true - } else if !ri && rj { - return false - } - return keys[i] < keys[j] - }) + props = append(props, Prop{Name: key, Title: prop.Title, Required: required[key], Default: def, Example: ex}) + } + sort.Slice(props, func(i, j int) bool { + ri := props[i].Required + rj := props[j].Required + if ri && !rj { + return true + } else if !ri && rj { + return false + } + return props[i].Name < props[j].Name + }) + return props +} - content += fmt.Sprintf("The following table summarizes the configuration options available for the `%s` Kamelet:\n", k.Name) +func (ctx *TemplateContext) HasProperties() bool { + return len(ctx.Kamelet.Spec.Definition.Properties) > 0 +} +func (ctx *TemplateContext) Properties() string { + content := "" + if len(ctx.Kamelet.Spec.Definition.Properties) > 0 { + props := getSortedProps(ctx.Kamelet) content += `[width="100%",cols="2,^2,3,^2,^2,^3",options="header"]` + "\n" content += "|===\n" content += tableLine("Property", "Name", "Description", "Type", "Default", "Example") - for _, key := range keys { - prop := k.Spec.Definition.Properties[key] + for _, propDef := range props { + key := propDef.Name + prop := ctx.Kamelet.Spec.Definition.Properties[key] name := key - if required[key] { + if propDef.Required { name = "*" + name + " {empty}* *" } - def := "" - if prop.Default != nil { - b, err := prop.Default.MarshalJSON() - handleGeneralError(fmt.Sprintf("cannot marshal property %q default value in Kamelet %s", key, k.Name), err) - def = "`" + strings.ReplaceAll(string(b), "`", "'") + "`" + var def string + if propDef.Default != nil { + def = "`" + strings.ReplaceAll(*propDef.Default, "`", "'") + "`" } - ex := "" - if prop.Example != nil { - b, err := prop.Example.MarshalJSON() - handleGeneralError(fmt.Sprintf("cannot marshal property %q example value in Kamelet %s", key, k.Name), err) - ex = "`" + strings.ReplaceAll(string(b), "`", 
"'") + "`" + var ex string + if propDef.Example != nil { + ex = "`" + strings.ReplaceAll(*propDef.Example, "`", "'") + "`" } content += tableLine(name, prop.Title, prop.Description, prop.Type, def, ex) } content += "|===\n" - content += "\n" - content += "NOTE: Fields marked with ({empty}*) are mandatory.\n" - } else { - content += "The Kamelet does not specify any configuration option.\n" } + return content +} - content += "\n" - content += "== Usage\n" - content += "\n" - content += fmt.Sprintf("This section summarizes how the `%s` can be used in various contexts.\n", k.Name) - content += "\n" - - tp := k.ObjectMeta.Labels["camel.apache.org/kamelet.type"] +func (ctx *TemplateContext) ExampleBinding(apiVersion, kind, name string) string { + content := "" + propDefs := getSortedProps(ctx.Kamelet) + tp := ctx.Kamelet.ObjectMeta.Labels["camel.apache.org/kamelet.type"] if tp != "" { - content += fmt.Sprintf("=== Knative %s\n", strings.Title(tp)) - content += "\n" - - if tp != "action" { - content += fmt.Sprintf("The `%s` Kamelet can be used as Knative %s by binding it to a Knative object.\n", k.Name, tp) - } else { - content += fmt.Sprintf("The `%s` Kamelet can be used as intermediate step in a binding.\n", k.Name) - } - content += "\n" - sampleConfig := make([]string, 0) - for _, key := range keys { - if !required[key] { + for _, propDef := range propDefs { + if !propDef.Required { continue } - prop := k.Spec.Definition.Properties[key] - if prop.Default == nil { - ex := "" - if prop.Example != nil { - b, err := prop.Example.MarshalJSON() - handleGeneralError(fmt.Sprintf("cannot marshal property %q example value in Kamelet %s", key, k.Name), err) - ex = string(b) - } - if ex == "" { - ex = `"The ` + prop.Title + `"` - } + key := propDef.Name + if propDef.Default == nil { + ex := propDef.GetSampleValue() sampleConfig = append(sampleConfig, fmt.Sprintf("%s: %s", key, ex)) } } @@ -201,13 +189,13 @@ func produceDoc(k camel.Kamelet, out string, image string) { kind: 
Kamelet apiVersion: camel.apache.org/v1alpha1 name: %s -%s`, k.Name, props) +%s`, ctx.Kamelet.Name, props) - knativeRef := ` ref: - kind: InMemoryChannel - apiVersion: messaging.knative.dev/v1 - name: mychannel -` + boundToRef := fmt.Sprintf(` ref: + kind: %s + apiVersion: %s + name: %s +`, kind, apiVersion, name) var sourceRef string var sinkRef string var steps string @@ -215,9 +203,9 @@ func produceDoc(k camel.Kamelet, out string, image string) { switch tp { case "source": sourceRef = kameletRef - sinkRef = knativeRef + sinkRef = boundToRef case "sink": - sourceRef = knativeRef + sourceRef = boundToRef sinkRef = kameletRef case "action": sourceRef = ` ref: @@ -226,7 +214,7 @@ func produceDoc(k camel.Kamelet, out string, image string) { name: timer-source properties: message: "Hello"` - sinkRef = knativeRef + sinkRef = boundToRef steps = fmt.Sprintf(` steps: -%s`, kameletRef[3:]) @@ -240,30 +228,100 @@ spec: source: %s%s sink: %s -`, k.Name, sourceRef, steps, sinkRef) +`, ctx.Kamelet.Name, sourceRef, steps, sinkRef) - content += fmt.Sprintf(".%s-binding.yaml\n", k.Name) + content += fmt.Sprintf(".%s-binding.yaml\n", ctx.Kamelet.Name) content += "[source,yaml]\n" content += "----\n" content += binding content += "----\n" - content += "\n" - content += "Make sure you have xref:latest@camel-k::installation/installation.adoc[Camel K installed] into the Kubernetes cluster you're connected to.\n" - content += "\n" - content += fmt.Sprintf("Save the `%s-binding.yaml` file into your hard drive, then configure it according to your needs.\n", k.Name) - content += "\n" - content += fmt.Sprintf("You can run the %s using the following command:\n", tp) - content += "\n" - content += "[source,shell]\n" - content += "----\n" - content += fmt.Sprintf("kubectl apply -f %s-binding.yaml\n", k.Name) - content += "----\n" + } + return content +} +func (ctx *TemplateContext) ExampleKamelBindCommand(ref string) string { + tp := 
ctx.Kamelet.ObjectMeta.Labels["camel.apache.org/kamelet.type"] + var prefix string + switch tp { + case "source": + prefix = "source." + case "sink": + prefix = "sink." + case "action": + prefix = "step-0." + default: + handleGeneralError("unknown kamelet type", errors.New(tp)) } + cmd := "kamel bind " + timer := "timer-source?message=Hello" + kamelet := ctx.Kamelet.Name + propDefs := getSortedProps(ctx.Kamelet) + for _, p := range propDefs { + if p.Required && p.Default == nil { + val := p.GetSampleValue() + if strings.HasPrefix(val, `"`) { + kamelet += fmt.Sprintf(` -p "%s%s=%s`, prefix, p.Name, val[1:]) + } else { + kamelet += fmt.Sprintf(" -p %s%s=%s", prefix, p.Name, val) + } + } + } + + switch tp { + case "source": + return cmd + kamelet + " " + ref + case "sink": + return cmd + ref + " " + kamelet + case "action": + return cmd + timer + " --step " + kamelet + " " + ref + default: + handleGeneralError("unknown kamelet type", errors.New(tp)) + } + return "" +} + +func saveNav(links []string, out string) { + content := "// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT\n" + for _, l := range links { + content += l + "\n" + } content += "// THIS FILE IS AUTOMATICALLY GENERATED: DO NOT EDIT\n" + dest := filepath.Join(out, "nav.adoc") + if _, err := os.Stat(dest); err == nil { + err = os.Remove(dest) + handleGeneralError(fmt.Sprintf("cannot remove file %q", dest), err) + } + err := ioutil.WriteFile(dest, []byte(content), 0666) + handleGeneralError(fmt.Sprintf("cannot write file %q", dest), err) + fmt.Printf("%q written\n", dest) +} + +func saveImage(k camel.Kamelet, out string) string { + if ic, ok := k.ObjectMeta.Annotations["camel.apache.org/kamelet.icon"]; ok { + svgb64Prefix := "data:image/svg+xml;base64," + if strings.HasPrefix(ic, svgb64Prefix) { + data := ic[len(svgb64Prefix):] + decoder := base64.NewDecoder(base64.StdEncoding, strings.NewReader(data)) + iconContent, err := ioutil.ReadAll(decoder) + handleGeneralError(fmt.Sprintf("cannot decode icon from 
Kamelet %s", k.Name), err) + dest := filepath.Join(out, "assets", "images", "kamelets", fmt.Sprintf("%s.svg", k.Name)) + if _, err := os.Stat(dest); err == nil { + err = os.Remove(dest) + handleGeneralError(fmt.Sprintf("cannot remove file %q", dest), err) + } + err = ioutil.WriteFile(dest, iconContent, 0666) + handleGeneralError(fmt.Sprintf("cannot write file %q", dest), err) + fmt.Printf("%q written\n", dest) + return fmt.Sprintf("image:kamelets/%s.svg[]", k.Name) + } + } + return "" +} +func produceDocFile(k camel.Kamelet, out string, content string) { + docFile := filepath.Join(out, "pages", k.Name+".adoc") if _, err := os.Stat(docFile); err == nil { err = os.Remove(docFile) handleGeneralError(fmt.Sprintf("cannot remove file %q", docFile), err) diff --git a/script/generator/go.mod b/script/generator/go.mod index 9833de264..a7b074634 100644 --- a/script/generator/go.mod +++ b/script/generator/go.mod @@ -4,7 +4,6 @@ go 1.14 require ( github.com/apache/camel-k/pkg/apis/camel v1.4.0 + github.com/iancoleman/strcase v0.1.3 k8s.io/apimachinery v0.19.8 - k8s.io/klog v1.0.0 // indirect - sigs.k8s.io/structured-merge-diff/v3 v3.0.0 // indirect ) diff --git a/script/generator/go.sum b/script/generator/go.sum index 2576cd736..f657a0153 100644 --- a/script/generator/go.sum +++ b/script/generator/go.sum @@ -3,13 +3,12 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= -github.com/apache/camel-k/pkg/apis/camel v1.3.1 h1:8w3oXV1gQbTeazfOGuvYiSy52s3Zx9f3TeFF0pWyJvs= -github.com/apache/camel-k/pkg/apis/camel v1.3.1/go.mod h1:N3N41+jPM447J8CvnzUcIdGZVvMh1dxP31xDSpZDeUk= github.com/apache/camel-k/pkg/apis/camel v1.4.0 
h1:iXa6uCVvMUItFejc+7WuLvcRyfQaRYWtg+9pEexgcM8= github.com/apache/camel-k/pkg/apis/camel v1.4.0/go.mod h1:uv2KugsT0kfLiRJZS15UqQ3gHsvuhmOax9+0Rs9L1wY= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/docker/spdystream v0.0.0-20160310174837-449fdfce4d96/go.mod h1:Qh8CwZgvJUkLughtfhJv5dyTYa91l1fOUCrgjqmcifM= github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE= @@ -31,10 +30,8 @@ github.com/go-openapi/swag v0.0.0-20160704191624-1d0bd113de87/go.mod h1:DXUve3Dp github.com/gogo/protobuf v1.3.1 h1:DqDEcV5aeaTmdFBePNpYsp3FlcVH/2ISVVM9Qf8PSls= github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/protobuf v0.0.0-20161109072736-4bd1920723d7/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= @@ -48,26 +45,26 @@ github.com/golang/protobuf 
v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.1.0 h1:Hsa8mG0dQ46ij8Sl2AYJDUv1oA9/d6Vk+3LG99Oe02g= github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/googleapis/gnostic v0.0.0-20170729233727-0c5108395e2d/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= -github.com/googleapis/gnostic v0.1.0/go.mod h1:sJBsCZ4ayReDTBIg8b9dl28c5xFWyhBTVRp3pOg5EKY= github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/iancoleman/strcase v0.1.3 h1:dJBk1m2/qjL1twPLf68JND55vvivMupZ4wIzE8CTdBw= +github.com/iancoleman/strcase v0.1.3/go.mod h1:SK73tn/9oHe+/Y0h39VT4UCxmurVJkR5NA7kMEAOgSE= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.8 h1:QiWkFLKq0T7mpzwOTu6BzNDbfTE8OLrYhVKYMLF46Ok= -github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/kisielk/errcheck v1.2.0/go.mod 
h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.0 h1:s5hAObm+yFO5uHYt5dYjxi2rXrsnmRpJx4OYvIWUaQs= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/mailru/easyjson v0.0.0-20160728113105-d5b7844b561a/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= @@ -83,14 +80,16 @@ github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= 
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= @@ -98,15 +97,12 @@ golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20191004110552-13f9640d40b9 h1:rjwSpXsdiK0dV8/Naq3kAw9ymfAeJIyd0upUIElB+lI= -golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net 
v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b h1:uwuIcX0g4Yl1NC5XAz37xsr2lTtcqevgzYNVt49waME= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= @@ -114,20 +110,15 @@ golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAG golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201112073958-5cba982894dd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/text v0.0.0-20160726164857-2910a502d2bf/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= 
-golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3 h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -137,6 +128,7 @@ golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= @@ -156,7 +148,7 @@ google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2 google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/fsnotify.v1 
v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= @@ -168,27 +160,15 @@ gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -k8s.io/api v0.18.9 h1:7VDtivqwbvLOf8hmXSd/PDSSbpCBq49MELg84EYBYiQ= -k8s.io/api v0.18.9/go.mod h1:9u/h6sUh6FxfErv7QqetX1EB3yBMIYOBXzdcf0Gf0rc= k8s.io/api v0.19.8 h1:U50vBUCb5kc2J483mnITLKfdyoITaC7PnaOJwT7oRRM= k8s.io/api v0.19.8/go.mod h1:9TMGoKoidvicOK0LSqj+Mj98pugQycbViPKyZHqutBc= -k8s.io/apimachinery v0.18.9 h1:3ZABKQx3F3xPWlsGhCfUl8W+JXRRblV6Wo2A3zn0pvY= -k8s.io/apimachinery v0.18.9/go.mod h1:PF5taHbXgTEJLU+xMypMmYTXTWPJ5LaW8bfsisxnEXk= k8s.io/apimachinery v0.19.8 h1:MaehcNcx8brsgvMsqspcI0bi22E1np/DACnjf2mhJ5A= k8s.io/apimachinery v0.19.8/go.mod h1:6sRbGRAVY5DOCuZwB5XkqguBqpqLU6q/kOaOdk29z6Q= -k8s.io/gengo v0.0.0-20190128074634-0689ccc1d7d6/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= k8s.io/gengo v0.0.0-20200413195148-3a45101e95ac/go.mod h1:ezvh/TsK7cY6rbqRK0oQQ8IAqLxYwwyPxAX1Pzy0ii0= -k8s.io/klog v0.0.0-20181102134211-b9b56d5dfc92/go.mod h1:Gq+BEi5rUBO/HRz0bTSXDUcqjScdoY3a9IHpCEIOOfk= -k8s.io/klog v1.0.0 h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8= -k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I= k8s.io/klog/v2 v2.0.0/go.mod h1:PBfzABfn139FHAV07az/IF9Wp1bkk3vpT2XSJ76fSDE= k8s.io/klog/v2 v2.2.0 h1:XRvcwJozkgZ1UQJmfMGpvRthQHOvihEhYtDfAaxMz/A= k8s.io/klog/v2 v2.2.0/go.mod h1:Od+F08eJP+W3HUb4pSrPpgp9DGU4GzlpG/TmITuYh/Y= -k8s.io/kube-openapi v0.0.0-20200410145947-61e04a5be9a6/go.mod h1:GRQhZsXIAJ1xR0C9bd8UpWHZ5plfAS9fzPjJuQ6JL3E= k8s.io/kube-openapi v0.0.0-20200805222855-6aeccd4b50c6/go.mod 
h1:UuqjUnNftUyPE5H64/qeyjQoUZhGpeFDVdxjTeEVN2o= -sigs.k8s.io/structured-merge-diff/v3 v3.0.0-20200116222232-67a7b8c61874/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= -sigs.k8s.io/structured-merge-diff/v3 v3.0.0 h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E= -sigs.k8s.io/structured-merge-diff/v3 v3.0.0/go.mod h1:PlARxl6Hbt/+BC80dRLi1qAmnMqwqDg62YvvVkZjemw= sigs.k8s.io/structured-merge-diff/v4 v4.0.1 h1:YXTMot5Qz/X1iBRJhAt+vI+HVttY0WkSqqhKxQ0xVbA= sigs.k8s.io/structured-merge-diff/v4 v4.0.1/go.mod h1:bJZC9H9iH24zzfZ/41RGcq60oK1F7G282QMXDPYydCw= sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o=