From b1dec7f37c5e3e6ab7726a41c469c3b8d208e850 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Thu, 1 Dec 2022 15:50:32 +0100 Subject: [PATCH] Include experimental Kamelets in the catalog --- .github/workflows/yaks-tests.yaml | 9 +- .../aws-ddb-experimental-sink.kamelet.yaml | 10 +- .../aws-s3-experimental-source.kamelet.yaml | 8 +- .../aws-ddb-experimental-sink.kamelet.yaml | 148 ++++++++++++++++ .../aws-s3-experimental-source.kamelet.yaml | 167 ++++++++++++++++++ .../aws-ddb-sink-exp}/amazonDDBClient.groovy | 0 .../aws-ddb-sink-binding.yaml | 4 +- .../aws-ddb-sink-deleteItem.feature | 10 +- .../aws-ddb-sink-putItem.feature | 10 +- .../aws-ddb-sink-updateItem.feature | 10 +- .../aws-ddb-sink-exp}/putItem.groovy | 0 .../aws-ddb-sink-exp}/verifyItems.groovy | 0 .../aws-ddb-sink-exp}/yaks-config.yaml | 4 - .../aws-s3-exp}/amazonS3Client.groovy | 0 .../aws-s3-exp}/aws-s3-cloudevents.feature | 10 +- .../aws-s3-exp}/aws-s3-knative.feature | 10 +- .../aws-s3-exp}/aws-s3-to-knative.yaml | 4 +- .../experimental/aws-s3-exp}/yaks-config.yaml | 4 - 18 files changed, 359 insertions(+), 49 deletions(-) rename experimental/aws-ddb-sink.exp.kamelet.yaml => kamelets/aws-ddb-experimental-sink.kamelet.yaml (95%) rename experimental/aws-s3-source.exp.kamelet.yaml => kamelets/aws-s3-experimental-source.kamelet.yaml (96%) create mode 100644 library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml create mode 100644 library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/amazonDDBClient.groovy (100%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-binding.yaml (95%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-deleteItem.feature (87%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-putItem.feature (86%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-updateItem.feature (89%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/putItem.groovy (100%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/verifyItems.groovy (100%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/yaks-config.yaml (93%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/amazonS3Client.groovy (100%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/aws-s3-cloudevents.feature (86%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/aws-s3-knative.feature (85%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/aws-s3-to-knative.yaml (95%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/yaks-config.yaml (94%) diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index 7f168ca25..a398b1a7f 100644 --- a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -65,7 +65,6 @@ jobs: # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + - find experimental -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i 
"s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + - name: Get Camel K CLI run: | curl --fail -L --silent https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz @@ -110,7 +109,7 @@ jobs: yaks install --operator-image $YAKS_IMAGE_NAME:$YAKS_VERSION - name: YAKS Tests run: | - echo "Running tests" + echo "Running tests for Kamelets" yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS yaks run test/aws-s3 $YAKS_RUN_OPTIONS @@ -122,11 +121,11 @@ jobs: yaks run test/earthquake-source $YAKS_RUN_OPTIONS yaks run test/rest-openapi-sink $YAKS_RUN_OPTIONS yaks run test/kafka $YAKS_RUN_OPTIONS - - name: YAKS Tests on experimental Kamelets + - name: YAKS Tests experimental Kamelets run: | echo "Running tests for experimental Kamelets" - yaks run experimental/test/aws-ddb-sink $YAKS_RUN_OPTIONS - yaks run experimental/test/aws-s3 $YAKS_RUN_OPTIONS + yaks run test/experimental/aws-ddb-sink-exp $YAKS_RUN_OPTIONS + yaks run test/experimental/aws-s3-exp $YAKS_RUN_OPTIONS - name: YAKS Report if: failure() run: | diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml b/kamelets/aws-ddb-experimental-sink.kamelet.yaml similarity index 95% rename from experimental/aws-ddb-sink.exp.kamelet.yaml rename to kamelets/aws-ddb-experimental-sink.kamelet.yaml index e19185fa0..a98ecb446 100644 --- a/experimental/aws-ddb-sink.exp.kamelet.yaml +++ b/kamelets/aws-ddb-experimental-sink.kamelet.yaml @@ -18,9 +18,9 @@ apiVersion: camel.apache.org/v1alpha1 kind: Kamelet metadata: - name: aws-ddb-sink-experimental + name: aws-ddb-experimental-sink annotations: - camel.apache.org/kamelet.support.level: "Experiemental" + camel.apache.org/kamelet.support.level: "Experimental" camel.apache.org/catalog.version: "main-SNAPSHOT" camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNMzkuNTA2IDEzLjY2N2gyMC4zMjF2NzEuOTk5SDM5LjUwNnpNNzQuMTMxIDY3LjU2NFY3OC41Mmw3LjQ4LTguNzY0di03LjYwMmwtNy40MzcgNS4zOTd6TTc0LjEzMSA2Mi45MzZsLjA0My0uMDEgNy40MzctNHYtNy42NDlsLTcuNDguNjg4ek03NC4xNzQgMzYuNDI5bC0uMDQzLS4wMVY0Ny4zNWw3LjQ4LjY5OXYtNy42NDV6Ii8+PHBhdGggZmlsbD0iIzFBNDc2RiIgZD0iTTgxLjYxMSA0OC4wNDlsLTcuNDgtLjY5OS0xNC4zMDMtLjU3MkgzOS41MDZsLTE0LjMwMy41NzJWMzYuNDQzbC0uMDE1LjAwOC4wMTUtLjAzMiAxNC4zMDMtMy4zMTRINTkuODI4bDE0LjMwMyAzLjMxNCA1LjI1OCAyLjc5NXYtMS43OTdsMi4yMjItLjI0My03LjQ4LTUuNDEtMTQuMzAzLTQuNDMySDM5LjUwNmwtMTQuMzAzIDQuNDMyVjIwLjgwN2wtNy40OCA4Ljc2M3Y3LjY1M2wuMDU4LS4wNDIgMi4xNjQuMjM2djEuODM0bC0yLjIyMiAxLjE4OXY3LjYxNWwuMDU4LS4wMDYgMi4xNjQuMDMydjMuMTk2bC0xLjg2Ny4wMjgtLjM1NS0uMDM0djcuNjE4bDIuMjIyIDEuMTk1djEuODU1bC0yLjEyOS4yMzUtLjA5My0uMDd2Ny42NTJsNy40OCA4Ljc2NFY2Ny41NjRsMTQuMzAzIDQuNDMySDU5LjgyOGwxNC4zNDUtNC40NDUgNy40MzgtNS4zNjctMi4yMjItLjI0NXYtMS44MThsLTUuMjE2IDIuODA1LTE0LjM0NSAzLjI5NXYuMDA0SDM5LjUwNnYtLjAwNGwtMTQuMzQ4LTMuMjk1LS4wMjUtLjA1MS4wNy4wMzdWNTEuOTY1bDE0LjMwMy41N3YuMDE0SDU5LjgyOHYtLjAxNGwxNC4zMDMtLjU3IDcuNDgtLjY1Ni0yLjIyMi0uMDMydi0zLjE5NnoiLz48L3N2Zz4=" camel.apache.org/provider: "Apache Software Foundation" @@ -29,7 +29,7 @@ 
metadata: camel.apache.org/kamelet.type: "sink" spec: definition: - title: "AWS DynamoDB Sink" + title: "AWS DynamoDB Experimental Sink" description: |- Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table. @@ -37,7 +37,9 @@ spec: If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method. - This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or updates an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + + This Kamelet supports an experimental input format to specify the data type that is given to this sink. The Kamelet makes a best effort to convert the provided input type to the input required by the sink. required: - table - region diff --git a/experimental/aws-s3-source.exp.kamelet.yaml b/kamelets/aws-s3-experimental-source.kamelet.yaml similarity index 96% rename from experimental/aws-s3-source.exp.kamelet.yaml rename to kamelets/aws-s3-experimental-source.kamelet.yaml index 7a8d8fe58..504157c16 100644 --- a/experimental/aws-s3-source.exp.kamelet.yaml +++ b/kamelets/aws-s3-experimental-source.kamelet.yaml @@ -1,7 +1,7 @@ apiVersion: camel.apache.org/v1alpha1 kind: Kamelet metadata: - name: aws-s3-source-experimental + name: aws-s3-experimental-source annotations: camel.apache.org/kamelet.support.level: "Experimental" camel.apache.org/catalog.version: "main-SNAPSHOT" @@ -12,7 +12,7 @@ metadata: camel.apache.org/kamelet.type: "source" spec: definition: - title: "AWS S3 Source" + title: "AWS S3 Experimental Source" description: |- Receive data from an Amazon S3 Bucket. @@ -20,7 +20,9 @@ spec: If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method. - Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name + Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name. + + This Kamelet supports an experimental output format to specify the data type produced by this source. Users of the Kamelet can choose from different output types.
required: - bucketNameOrArn - region diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml new file mode 100644 index 000000000..a98ecb446 --- /dev/null +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml @@ -0,0 +1,148 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: Kamelet +metadata: + name: aws-ddb-experimental-sink + annotations: + camel.apache.org/kamelet.support.level: "Experimental" + camel.apache.org/catalog.version: "main-SNAPSHOT" + camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNMzkuNTA2IDEzLjY2N2gyMC4zMjF2NzEuOTk5SDM5LjUwNnpNNzQuMTMxIDY3LjU2NFY3OC41Mmw3LjQ4LTguNzY0di03LjYwMmwtNy40MzcgNS4zOTd6TTc0LjEzMSA2Mi45MzZsLjA0My0uMDEgNy40MzctNHYtNy42NDlsLTcuNDguNjg4ek03NC4xNzQgMzYuNDI5bC0uMDQzLS4wMVY0Ny4zNWw3LjQ4LjY5OXYtNy42NDV6Ii8+PHBhdGggZmlsbD0iIzFBNDc2RiIgZD0iTTgxLjYxMSA0OC4wNDlsLTcuNDgtLjY5OS0xNC4zMDMtLjU3MkgzOS41MDZsLTE0LjMwMy41NzJWMzYuNDQzbC0uMDE1LjAwOC4wMTUtLjAzMiAxNC4zMDMtMy4zMTRINTkuODI4bDE0LjMwMyAzLjMxNCA1LjI1OCAyLjc5NXYtMS43OTdsMi4yMjItLjI0My03LjQ4LTUuNDEtMTQuMzAzLTQuNDMySDM5LjUwNmwtMTQuMzAzIDQuNDMyVjIwLjgwN2wtNy40OCA4Ljc2M3Y3LjY1M2wuMDU4LS4wNDIgMi4xNjQuMjM2djEuODM0bC0yLjIyMiAxLjE4OXY3LjYxNWwuMDU4LS4wMDYgMi4xNjQuMDMydjMuMTk2bC0xLjg2Ny4wMjgtLjM1NS0uMDM0djcuNjE4bDIuMjIyIDEuMTk1djEuODU1bC0yLjEyOS4yMzUtLjA5My0uMDd2Ny42NTJsNy40OCA4Ljc2NFY2Ny41NjRsMTQuMzAzIDQuNDMySDU5LjgyOGwxNC4zNDUtNC40NDUgNy40MzgtNS4zNjctMi4yMjItLjI0NXYtMS44MThsLTUuMjE2IDIuODA1LTE0LjM0NSAzLjI5NXYuMDA0SDM5LjUwNnYtLjAwNGwtMTQuMzQ4LTMuMjk1LS4wMjUtLjA1MS4wNy4wMzdWNTEuOTY1bDE0LjMwMy41N3YuMDE0SDU5LjgyOHYtLjAxNGwxNC4zMDMtLjU3IDcuNDgtLjY1Ni0yLjIyMi0uMDMydi0zLjE5NnoiLz48L3N2Zz4=" + camel.apache.org/provider: "Apache Software Foundation" + camel.apache.org/kamelet.group: "AWS DynamoDB Streams" + labels: + camel.apache.org/kamelet.type: "sink" +spec: + definition: + title: "AWS DynamoDB Experimental Sink" + description: |- + Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table. 
+ + The basic authentication method for the AWS DynamoDB service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider. + + If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method. + + This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or updates an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + + This Kamelet supports an experimental input format to specify the data type that is given to this sink. The Kamelet makes a best effort to convert the provided input type to the input required by the sink. + required: + - table + - region + type: object + properties: + table: + title: Table + description: The name of the DynamoDB table. + type: string + accessKey: + title: Access Key + description: The access key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + secretKey: + title: Secret Key + description: The secret key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + region: + title: AWS Region + description: The AWS region to access. + type: string + enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "aws-cn-global", "aws-us-gov-global", "aws-iso-global", "aws-iso-b-global"] + operation: + title: Operation + description: "The operation to perform. The options are PutItem, UpdateItem, or DeleteItem." + type: string + default: PutItem + example: PutItem + writeCapacity: + title: Write Capacity + description: The provisioned throughput to reserve for writing resources to your table. + type: integer + default: 1 + useDefaultCredentialsProvider: + title: Default Credentials Provider + description: If true, the DynamoDB client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key). + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + uriEndpointOverride: + title: Overwrite Endpoint URI + description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option. + type: string + overrideEndpoint: + title: Endpoint Overwrite + description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + inputFormat: + title: Input Type + description: Specify the input type for this Kamelet.
The Kamelet will automatically apply conversion logic in order to transform message content to this data type. + type: string + default: json + example: json + types: + in: + mediaType: application/json + dependencies: + - github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT + - "camel:core" + - "camel:jackson" + - "camel:aws2-ddb" + - "camel:kamelet" + template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: inputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-ddb' + - key: format + value: '{{inputFormat}}' + - key: registry + value: '#bean:{{dataTypeRegistry}}' + from: + uri: "kamelet:source" + steps: + - set-property: + name: operation + constant: "{{operation}}" + - process: + ref: "{{inputTypeProcessor}}" + - to: + uri: "aws2-ddb:{{table}}" + parameters: + secretKey: "{{?secretKey}}" + accessKey: "{{?accessKey}}" + region: "{{region}}" + operation: "{{operation}}" + writeCapacity: "{{?writeCapacity}}" + useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}" + uriEndpointOverride: "{{?uriEndpointOverride}}" + overrideEndpoint: "{{overrideEndpoint}}" diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml new file mode 100644 index 000000000..504157c16 --- /dev/null +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml @@ -0,0 +1,167 @@ +apiVersion: camel.apache.org/v1alpha1 +kind: Kamelet +metadata: + name: aws-s3-experimental-source + annotations: + camel.apache.org/kamelet.support.level: "Experimental" + camel.apache.org/catalog.version: "main-SNAPSHOT" + camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAyNDguMiAzMDAiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxzdHlsZT4uc3QyOHtmaWxsOiM4YzMxMjN9LnN0Mjl7ZmlsbDojZTA1MjQzfTwvc3R5bGU+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik0yMCA1Mi4xTDAgNjJ2MTc1LjVsMjAgOS45LjEtLjFWNTIuMmwtLjEtLjEiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNyAyMjJMMjAgMjQ3LjVWNTIuMUwxMjcgNzd2MTQ1Ii8+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik03OC43IDE4Mi4xbDQ1LjQgNS44LjMtLjcuMy03NC40LS41LS42LTQ1LjQgNS43LS4xIDY0LjIiLz48cGF0aCBjbGFzcz0ic3QyOCIgZD0iTTEyNC4xIDIyMi4zbDEwNC4xIDI1LjIuMi0uM1Y1Mi4xbC0uMi0uMi0xMDQuMSAyNS40djE0NSIvPjxwYXRoIGNsYXNzPSJzdDI5IiBkPSJNMTY5LjUgMTgyLjFsLTQ1LjQgNS44di03NS43bDQ1LjQgNS43djY0LjIiLz48cGF0aCBkPSJNMTY5LjUgODYuOWwtNDUuNCA4LjMtNDUuNC04LjNMMTI0IDc1bDQ1LjUgMTEuOSIgZmlsbD0iIzVlMWYxOCIvPjxwYXRoIGQ9Ik0xNjkuNSAyMTMuMWwtNDUuNC04LjMtNDUuNCA4LjMgNDUuMyAxMi43IDQ1LjUtMTIuNyIgZmlsbD0iI2YyYjBhOSIvPjxwYXRoIGNsYXNzPSJzdDI4IiBkPSJNNzguNyA4Ni45bDQ1LjQtMTEuMi40LS4xVi4zbC0uNC0uMy00NS40IDIyLjd2NjQuMiIvPjxwYXRoIGNsYXNzPSJzdDI5IiBkPSJNMTY5LjUgODYuOWwtNDUuNC0xMS4yVjBsNDUuNCAyMi43djY0LjIiLz48cGF0aCBjbGFzcz0ic3QyOCIgZD0iTTEyNC4xIDMwMGwtNDUuNC0yMi43di02NC4ybDQ1LjQgMTEuMi43LjgtLjIgNzMuNi0uNSAxLjMiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNC4xIDMwMGw0NS40LTIyLjd2LTY0LjJsLTQ1LjQgMTEuMlYzMDBNMjI4LjIgNTIuMWwyMCAxMHYxNzUuNWwtMjAgMTBWNTIuMSIvPjwvc3ZnPg==" + camel.apache.org/provider: "Apache Software Foundation" + camel.apache.org/kamelet.group: "AWS S3" + labels: + camel.apache.org/kamelet.type: "source" +spec: + definition: + title: "AWS S3 Experimental Source" + description: |- + Receive data from an Amazon S3 Bucket. 
+ + The basic authentication method for the S3 service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider. + + If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method. + + Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name. + + This Kamelet supports an experimental output format to specify the data type produced by this source. Users of the Kamelet can choose from different output types. + required: + - bucketNameOrArn + - region + type: object + properties: + bucketNameOrArn: + title: Bucket Name + description: The S3 Bucket name or Amazon Resource Name (ARN). + type: string + deleteAfterRead: + title: Auto-delete Objects + description: Specifies to delete objects after consuming them. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: true + accessKey: + title: Access Key + description: The access key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + secretKey: + title: Secret Key + description: The secret key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + region: + title: AWS Region + description: The AWS region to access. + type: string + enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "aws-cn-global", "aws-us-gov-global", "aws-iso-global", "aws-iso-b-global"] + autoCreateBucket: + title: Autocreate Bucket + description: Specifies to automatically create the S3 bucket. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + includeBody: + title: Include Body + description: If true, the exchange is consumed and put into the body and closed. If false, the S3Object stream is put raw into the body and the headers are set with the S3 object metadata. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: true + prefix: + title: Prefix + description: The AWS S3 bucket prefix to consider while searching. + type: string + example: 'folder/' + ignoreBody: + title: Ignore Body + description: If true, the S3 Object body is ignored. Setting this to true overrides any behavior defined by the `includeBody` option. If false, the S3 object is put in the body. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + useDefaultCredentialsProvider: + title: Default Credentials Provider + description: If true, the S3 client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key).
+ type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + uriEndpointOverride: + title: Overwrite Endpoint URI + description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option. + type: string + overrideEndpoint: + title: Endpoint Overwrite + description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + delay: + title: Delay + description: The number of milliseconds before the next poll of the selected bucket. + type: integer + default: 500 + outputFormat: + title: Output Type + description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type. + type: string + default: binary + example: binary + dependencies: + - "camel:core" + - "camel:aws2-s3" + - "github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT" + - "camel:kamelet" + template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: outputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-s3' + - key: format + value: '{{outputFormat}}' + - key: registry + value: '#bean:{{dataTypeRegistry}}' + - name: renameHeaders + type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" + property: + - key: prefix + value: 'CamelAwsS3' + - key: renamingPrefix + value: 'aws.s3.' + - key: mode + value: 'filtering' + - key: selectedHeaders + value: 'CamelAwsS3Key,CamelAwsS3BucketName' + from: + uri: "aws2-s3:{{bucketNameOrArn}}" + parameters: + autoCreateBucket: "{{autoCreateBucket}}" + secretKey: "{{?secretKey}}" + accessKey: "{{?accessKey}}" + region: "{{region}}" + includeBody: "{{includeBody}}" + ignoreBody: "{{ignoreBody}}" + deleteAfterRead: "{{deleteAfterRead}}" + prefix: "{{?prefix}}" + useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}" + uriEndpointOverride: "{{?uriEndpointOverride}}" + overrideEndpoint: "{{overrideEndpoint}}" + delay: "{{delay}}" + steps: + - process: + ref: "{{renameHeaders}}" + - process: + ref: "{{outputTypeProcessor}}" + - to: "kamelet:sink" diff --git a/experimental/test/aws-ddb-sink/amazonDDBClient.groovy b/test/experimental/aws-ddb-sink-exp/amazonDDBClient.groovy similarity index 100% rename from experimental/test/aws-ddb-sink/amazonDDBClient.groovy rename to test/experimental/aws-ddb-sink-exp/amazonDDBClient.groovy diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml similarity index 95% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml index 6b4b2b024..d1e5fb440 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml @@ -18,7 +18,7 @@ apiVersion: camel.apache.org/v1alpha1 kind: KameletBinding metadata: - name: aws-ddb-sink-binding + name: aws-ddb-experimental-sink-binding spec: source: ref: @@ -39,7 +39,7 @@ spec: ref: kind: Kamelet apiVersion: camel.apache.org/v1alpha1 - name: aws-ddb-sink-experimental + name: aws-ddb-experimental-sink properties: table: ${aws.ddb.tableName} operation: 
${aws.ddb.operation} diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature similarity index 87% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature index 6c54fdc36..d535b82f7 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature @@ -18,7 +18,7 @@ Feature: AWS DDB Sink - DeleteItem Background: - Given Kamelet aws-ddb-sink-experimental is available + Given Kamelet aws-ddb-experimental-sink is available Given Camel K resource polling configuration | maxAttempts | 200 | | delayBetweenAttempts | 2000 | @@ -48,9 +48,9 @@ Feature: AWS DDB Sink - DeleteItem Scenario: Create AWS-DDB Kamelet sink binding When load KameletBinding aws-ddb-sink-binding.yaml - And KameletBinding aws-ddb-sink-binding is available - And Camel K integration aws-ddb-sink-binding is running - And Camel K integration aws-ddb-sink-binding should print Routes startup + And KameletBinding aws-ddb-experimental-sink-binding is available + And Camel K integration aws-ddb-experimental-sink-binding is running + And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup Then sleep 10sec Scenario: Verify Kamelet sink @@ -59,7 +59,7 @@ Feature: AWS DDB Sink - DeleteItem Then run script verifyItems.groovy Scenario: Remove Camel K resources - Given delete KameletBinding aws-ddb-sink-binding + Given delete KameletBinding aws-ddb-experimental-sink-binding Scenario: Stop container Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature similarity index 86% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature index f117889b9..637b1dab7 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature @@ -18,7 +18,7 @@ Feature: AWS DDB Sink - PutItem Background: - Given Kamelet aws-ddb-sink-experimental is available + Given Kamelet aws-ddb-experimental-sink is available Given Camel K resource polling configuration | maxAttempts | 200 | | delayBetweenAttempts | 2000 | @@ -43,16 +43,16 @@ Feature: AWS DDB Sink - PutItem Scenario: Create AWS-DDB Kamelet sink binding When load KameletBinding aws-ddb-sink-binding.yaml - And KameletBinding aws-ddb-sink-binding is available - And Camel K integration aws-ddb-sink-binding is running - And Camel K integration aws-ddb-sink-binding should print Routes startup + And KameletBinding aws-ddb-experimental-sink-binding is available + And Camel K integration aws-ddb-experimental-sink-binding is running + And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup Then sleep 10sec Scenario: Verify Kamelet sink Then run script verifyItems.groovy Scenario: Remove Camel K resources - Given delete KameletBinding aws-ddb-sink-binding + Given delete KameletBinding aws-ddb-experimental-sink-binding Scenario: Stop container Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature similarity index 89% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature rename to 
test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature index 215adbe21..5a0a29c1c 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature @@ -18,7 +18,7 @@ Feature: AWS DDB Sink - UpdateItem Background: - Given Kamelet aws-ddb-sink-experimental is available + Given Kamelet aws-ddb-experimental-sink is available Given Camel K resource polling configuration | maxAttempts | 200 | | delayBetweenAttempts | 2000 | @@ -50,9 +50,9 @@ Feature: AWS DDB Sink - UpdateItem Scenario: Create AWS-DDB Kamelet sink binding When load KameletBinding aws-ddb-sink-binding.yaml - And KameletBinding aws-ddb-sink-binding is available - And Camel K integration aws-ddb-sink-binding is running - And Camel K integration aws-ddb-sink-binding should print Routes startup + And KameletBinding aws-ddb-experimental-sink-binding is available + And Camel K integration aws-ddb-experimental-sink-binding is running + And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup Then sleep 10sec Scenario: Verify Kamelet sink @@ -62,7 +62,7 @@ Feature: AWS DDB Sink - UpdateItem Then run script verifyItems.groovy Scenario: Remove Camel K resources - Given delete KameletBinding aws-ddb-sink-binding + Given delete KameletBinding aws-ddb-experimental-sink-binding Scenario: Stop container Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/putItem.groovy b/test/experimental/aws-ddb-sink-exp/putItem.groovy similarity index 100% rename from experimental/test/aws-ddb-sink/putItem.groovy rename to test/experimental/aws-ddb-sink-exp/putItem.groovy diff --git a/experimental/test/aws-ddb-sink/verifyItems.groovy b/test/experimental/aws-ddb-sink-exp/verifyItems.groovy similarity index 100% rename from experimental/test/aws-ddb-sink/verifyItems.groovy rename to test/experimental/aws-ddb-sink-exp/verifyItems.groovy diff --git a/experimental/test/aws-ddb-sink/yaks-config.yaml b/test/experimental/aws-ddb-sink-exp/yaks-config.yaml similarity index 93% rename from experimental/test/aws-ddb-sink/yaks-config.yaml rename to test/experimental/aws-ddb-sink-exp/yaks-config.yaml index 51cf3b527..15156f088 100644 --- a/experimental/test/aws-ddb-sink/yaks-config.yaml +++ b/test/experimental/aws-ddb-sink-exp/yaks-config.yaml @@ -56,7 +56,3 @@ config: failedOnly: true includes: - app=camel-k -pre: - - name: Install experimental Kamelets - run: | - kubectl apply -f ../../aws-ddb-sink.exp.kamelet.yaml -n $YAKS_NAMESPACE diff --git a/experimental/test/aws-s3/amazonS3Client.groovy b/test/experimental/aws-s3-exp/amazonS3Client.groovy similarity index 100% rename from experimental/test/aws-s3/amazonS3Client.groovy rename to test/experimental/aws-s3-exp/amazonS3Client.groovy diff --git a/experimental/test/aws-s3/aws-s3-cloudevents.feature b/test/experimental/aws-s3-exp/aws-s3-cloudevents.feature similarity index 86% rename from experimental/test/aws-s3/aws-s3-cloudevents.feature rename to test/experimental/aws-s3-exp/aws-s3-cloudevents.feature index 6f5513fc2..2ce2d0d60 100644 --- a/experimental/test/aws-s3/aws-s3-cloudevents.feature +++ b/test/experimental/aws-s3-exp/aws-s3-cloudevents.feature @@ -3,7 +3,7 @@ Feature: AWS S3 Kamelet - cloud events data type Background: - Given Kamelet aws-s3-source-experimental is available + Given Kamelet aws-s3-experimental-source is available Given Knative event consumer timeout is 20000 ms Given Camel K resource polling configuration | maxAttempts | 200 | @@ -30,9 +30,9 @@ 
Feature: AWS S3 Kamelet - cloud events data type Scenario: Create AWS-S3 Kamelet to Knative binding Given variable loginfo is "Installed features" When load KameletBinding aws-s3-to-knative.yaml - And KameletBinding aws-s3-to-knative is available - And Camel K integration aws-s3-to-knative is running - Then Camel K integration aws-s3-to-knative should print ${loginfo} + And KameletBinding aws-s3-to-knative-binding is available + And Camel K integration aws-s3-to-knative-binding is running + Then Camel K integration aws-s3-to-knative-binding should print ${loginfo} Scenario: Verify Kamelet source Given create Knative event consumer service event-consumer-service @@ -48,7 +48,7 @@ Feature: AWS S3 Kamelet - cloud events data type | id | @ignore@ | Scenario: Remove Camel K resources - Given delete KameletBinding aws-s3-to-knative + Given delete KameletBinding aws-s3-to-knative-binding Given delete Kubernetes service event-consumer-service Scenario: Stop container diff --git a/experimental/test/aws-s3/aws-s3-knative.feature b/test/experimental/aws-s3-exp/aws-s3-knative.feature similarity index 85% rename from experimental/test/aws-s3/aws-s3-knative.feature rename to test/experimental/aws-s3-exp/aws-s3-knative.feature index 8a6512a9c..bb1bebd39 100644 --- a/experimental/test/aws-s3/aws-s3-knative.feature +++ b/test/experimental/aws-s3-exp/aws-s3-knative.feature @@ -3,7 +3,7 @@ Feature: AWS S3 Kamelet - Knative binding Background: - Given Kamelet aws-s3-source-experimental is available + Given Kamelet aws-s3-experimental-source is available Given Knative event consumer timeout is 20000 ms Given Camel K resource polling configuration | maxAttempts | 200 | @@ -30,9 +30,9 @@ Feature: AWS S3 Kamelet - Knative binding Scenario: Create AWS-S3 Kamelet to Knative binding Given variable loginfo is "Installed features" When load KameletBinding aws-s3-to-knative.yaml - And KameletBinding aws-s3-to-knative is available - And Camel K integration aws-s3-to-knative is running - Then Camel K integration aws-s3-to-knative should print ${loginfo} + And KameletBinding aws-s3-to-knative-binding is available + And Camel K integration aws-s3-to-knative-binding is running + Then Camel K integration aws-s3-to-knative-binding should print ${loginfo} Scenario: Verify Kamelet source Given create Knative event consumer service event-consumer-service @@ -47,7 +47,7 @@ Feature: AWS S3 Kamelet - Knative binding | id | @ignore@ | Scenario: Remove Camel K resources - Given delete KameletBinding aws-s3-to-knative + Given delete KameletBinding aws-s3-to-knative-binding Given delete Kubernetes service event-consumer-service Given delete Knative broker default diff --git a/experimental/test/aws-s3/aws-s3-to-knative.yaml b/test/experimental/aws-s3-exp/aws-s3-to-knative.yaml similarity index 95% rename from experimental/test/aws-s3/aws-s3-to-knative.yaml rename to test/experimental/aws-s3-exp/aws-s3-to-knative.yaml index afa1b5725..117c33322 100644 --- a/experimental/test/aws-s3/aws-s3-to-knative.yaml +++ b/test/experimental/aws-s3-exp/aws-s3-to-knative.yaml @@ -18,13 +18,13 @@ apiVersion: camel.apache.org/v1alpha1 kind: KameletBinding metadata: - name: aws-s3-to-knative + name: aws-s3-to-knative-binding spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1alpha1 - name: aws-s3-source-experimental + name: aws-s3-experimental-source properties: bucketNameOrArn: ${aws.s3.bucketNameOrArn} overrideEndpoint: true diff --git a/experimental/test/aws-s3/yaks-config.yaml b/test/experimental/aws-s3-exp/yaks-config.yaml similarity 
index 94% rename from experimental/test/aws-s3/yaks-config.yaml rename to test/experimental/aws-s3-exp/yaks-config.yaml index 6431eaf81..33d55aac4 100644 --- a/experimental/test/aws-s3/yaks-config.yaml +++ b/test/experimental/aws-s3-exp/yaks-config.yaml @@ -63,7 +63,3 @@ config: failedOnly: true includes: - app=camel-k -pre: - - name: Install experimental Kamelets - run: | - kubectl apply -f ../../aws-s3-source.exp.kamelet.yaml -n $YAKS_NAMESPACE
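A minimal KameletBinding sketch showing how the two renamed experimental Kamelets and their new data type options could be wired together; the binding name, bucket, table, and region values are placeholders for illustration and are not taken from this patch:

apiVersion: camel.apache.org/v1alpha1
kind: KameletBinding
metadata:
  name: aws-s3-to-ddb-experimental-binding   # hypothetical name, not used by the tests above
spec:
  source:
    ref:
      kind: Kamelet
      apiVersion: camel.apache.org/v1alpha1
      name: aws-s3-experimental-source
    properties:
      bucketNameOrArn: example-bucket        # placeholder bucket
      region: eu-west-1                      # placeholder region
      outputFormat: binary                   # experimental output data type (Kamelet default)
  sink:
    ref:
      kind: Kamelet
      apiVersion: camel.apache.org/v1alpha1
      name: aws-ddb-experimental-sink
    properties:
      table: example-table                   # placeholder table
      region: eu-west-1                      # placeholder region
      operation: PutItem
      inputFormat: json                      # experimental input data type expected by the sink

With such a binding, the source's DataTypeProcessor (scheme aws2-s3) applies the configured outputFormat to the consumed S3 object, and the sink's DataTypeProcessor (scheme aws2-ddb) attempts a best-effort conversion of the incoming payload to the JSON input required by the aws2-ddb endpoint.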