From e79d4474f63762003eb48f291b2676de36250313 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Mon, 14 Nov 2022 13:15:00 +0100 Subject: [PATCH 01/28] Introduce Kamelet input/output data types - Introduce data type converters - Add data type processor to auto convert exchange message from/to given data type - Let user choose which data type to use (via Kamelet property) - Add data type registry and annotation based loader to find data type implementations by component scheme and name Relates to CAMEL-18698 and apache/camel-k#1980 --- .github/workflows/yaks-tests.yaml | 3 +- kamelets/aws-ddb-sink.kamelet.yaml | 27 ++- kamelets/aws-s3-source.kamelet.yaml | 17 ++ library/camel-kamelets-utils/pom.xml | 7 +- .../format/AnnotationDataTypeLoader.java | 152 +++++++++++++++++ .../utils/format/DataTypeProcessor.java | 67 ++++++++ .../format/DefaultDataTypeConverter.java | 54 ++++++ .../utils/format/DefaultDataTypeRegistry.java | 154 ++++++++++++++++++ .../aws2/ddb/Ddb2JsonInputType.java} | 87 +++++++--- .../aws2/s3/AWS2S3BinaryOutputType.java | 55 +++++++ .../aws2/s3/AWS2S3JsonOutputType.java | 63 +++++++ .../converter/standard/JsonModelDataType.java | 66 ++++++++ .../utils/format/spi/DataTypeConverter.java | 39 +++++ .../utils/format/spi/DataTypeLoader.java | 31 ++++ .../utils/format/spi/DataTypeRegistry.java | 60 +++++++ .../format/spi/annotations/DataType.java | 51 ++++++ .../services/org/apache/camel/DataType | 20 +++ .../format/DefaultDataTypeRegistryTest.java | 57 +++++++ .../aws2/ddb/Ddb2JsonInputTypeTest.java} | 104 ++++++++---- .../aws2/s3/AWS2S3JsonOutputTypeTest.java | 98 +++++++++++ .../standard/JsonModelDataTypeTest.java | 84 ++++++++++ .../src/test/resources/log4j2-test.xml | 32 ++++ .../kamelets/aws-ddb-sink.kamelet.yaml | 27 ++- .../kamelets/aws-s3-source.kamelet.yaml | 17 ++ test/aws-s3/README.md | 76 +++++++++ test/aws-s3/amazonS3Client.groovy | 36 ++++ test/aws-s3/aws-s3-credentials.properties | 7 + test/aws-s3/aws-s3-inmem-binding.feature | 49 
++++++ .../aws-s3-source-property-conf.feature | 37 +++++ test/aws-s3/aws-s3-source-secret-conf.feature | 39 +++++ test/aws-s3/aws-s3-source-uri-conf.feature | 32 ++++ test/aws-s3/aws-s3-to-inmem.yaml | 39 +++++ test/aws-s3/aws-s3-to-log-secret-based.groovy | 21 +++ test/aws-s3/aws-s3-to-log-uri-based.groovy | 29 ++++ test/aws-s3/aws-s3-uri-binding.feature | 35 ++++ test/aws-s3/aws-s3-uri-binding.yaml | 37 +++++ test/aws-s3/yaks-config.yaml | 65 ++++++++ test/utils/inmem-to-log.yaml | 29 ++++ 38 files changed, 1829 insertions(+), 74 deletions(-) create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java rename library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/{transform/aws/ddb/JsonToDdbModelConverter.java => format/converter/aws2/ddb/Ddb2JsonInputType.java} (69%) create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java create mode 100644 
library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java rename library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/{transform/aws/ddb/JsonToDdbModelConverterTest.java => format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java} (65%) create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java create mode 100644 library/camel-kamelets-utils/src/test/resources/log4j2-test.xml create mode 100644 test/aws-s3/README.md create mode 100644 test/aws-s3/amazonS3Client.groovy create mode 100644 test/aws-s3/aws-s3-credentials.properties create mode 100644 test/aws-s3/aws-s3-inmem-binding.feature create mode 100644 test/aws-s3/aws-s3-source-property-conf.feature create mode 100644 test/aws-s3/aws-s3-source-secret-conf.feature create mode 100644 test/aws-s3/aws-s3-source-uri-conf.feature create mode 100644 test/aws-s3/aws-s3-to-inmem.yaml create mode 100644 test/aws-s3/aws-s3-to-log-secret-based.groovy create mode 100644 test/aws-s3/aws-s3-to-log-uri-based.groovy create mode 100644 test/aws-s3/aws-s3-uri-binding.feature create mode 100644 test/aws-s3/aws-s3-uri-binding.yaml create mode 100644 test/aws-s3/yaks-config.yaml create mode 100644 test/utils/inmem-to-log.yaml diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index 46acc6260..defc5733b 100644 --- 
a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -43,7 +43,7 @@ concurrency: env: CAMEL_K_VERSION: 1.10.3 YAKS_VERSION: 0.11.0 - YAKS_IMAGE_NAME: "docker.io/yaks/yaks" + YAKS_IMAGE_NAME: "docker.io/citrusframework/yaks" YAKS_RUN_OPTIONS: "--timeout=15m" jobs: @@ -110,6 +110,7 @@ jobs: run: | echo "Running tests" yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS + yaks run test/aws-s3 $YAKS_RUN_OPTIONS yaks run test/extract-field-action $YAKS_RUN_OPTIONS yaks run test/insert-field-action $YAKS_RUN_OPTIONS yaks run test/mail-sink $YAKS_RUN_OPTIONS diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml index 5b603abfc..ba2003478 100644 --- a/kamelets/aws-ddb-sink.kamelet.yaml +++ b/kamelets/aws-ddb-sink.kamelet.yaml @@ -97,6 +97,12 @@ spec: x-descriptors: - 'urn:alm:descriptor:com.tectonic.ui:checkbox' default: false + inputFormat: + title: Input Type + description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type. 
+ type: string + default: json + example: json types: in: mediaType: application/json @@ -107,17 +113,24 @@ spec: - "camel:aws2-ddb" - "camel:kamelet" template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: inputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-ddb' + - key: format + value: '{{inputFormat}}' from: uri: "kamelet:source" steps: - set-property: - name: operation - constant: "{{operation}}" - - unmarshal: - json: - library: Jackson - unmarshalType: com.fasterxml.jackson.databind.JsonNode - - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter" + name: operation + constant: "{{operation}}" + - process: + ref: "{{inputTypeProcessor}}" - to: uri: "aws2-ddb:{{table}}" parameters: diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml index 6ab2bca41..e09cf4aa2 100644 --- a/kamelets/aws-s3-source.kamelet.yaml +++ b/kamelets/aws-s3-source.kamelet.yaml @@ -107,6 +107,12 @@ spec: description: The number of milliseconds before the next poll of the selected bucket. type: integer default: 500 + outputFormat: + title: Output Type + description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type. 
+ type: string + default: binary + example: binary dependencies: - "camel:core" - "camel:aws2-s3" @@ -114,6 +120,15 @@ spec: - "camel:kamelet" template: beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: outputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-s3' + - key: format + value: '{{outputFormat}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: @@ -143,4 +158,6 @@ spec: steps: - process: ref: "{{renameHeaders}}" + - process: + ref: "{{outputTypeProcessor}}" - to: "kamelet:sink" diff --git a/library/camel-kamelets-utils/pom.xml b/library/camel-kamelets-utils/pom.xml index 4f848d36c..5b1441f31 100644 --- a/library/camel-kamelets-utils/pom.xml +++ b/library/camel-kamelets-utils/pom.xml @@ -71,12 +71,17 @@ camel-kafka - + org.apache.camel camel-aws2-ddb provided + + org.apache.camel + camel-aws2-s3 + provided + diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java new file mode 100644 index 000000000..96ca50eb9 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.util.Enumeration; +import java.util.HashSet; +import java.util.Set; + +import org.apache.camel.TypeConverterLoaderException; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; +import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; +import org.apache.camel.spi.Injector; +import org.apache.camel.spi.PackageScanClassResolver; +import org.apache.camel.util.IOHelper; +import org.apache.camel.util.ObjectHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Data type loader scans packages for {@link DataTypeConverter} classes annotated with {@link DataType} annotation. 
+ */ +public class AnnotationDataTypeLoader implements DataTypeLoader { + + public static final String META_INF_SERVICES = "META-INF/services/org/apache/camel/DataType"; + + private static final Logger LOG = LoggerFactory.getLogger(AnnotationDataTypeLoader.class); + + protected final PackageScanClassResolver resolver; + protected final Injector injector; + + protected Set> visitedClasses = new HashSet<>(); + protected Set visitedURIs = new HashSet<>(); + + public AnnotationDataTypeLoader(Injector injector, PackageScanClassResolver resolver) { + this.injector = injector; + this.resolver = resolver; + } + + @Override + public void load(DataTypeRegistry registry) { + Set packages = new HashSet<>(); + + LOG.trace("Searching for {} services", META_INF_SERVICES); + try { + ClassLoader ccl = Thread.currentThread().getContextClassLoader(); + if (ccl != null) { + findPackages(packages, ccl); + } + findPackages(packages, getClass().getClassLoader()); + if (packages.isEmpty()) { + LOG.debug("No package names found to be used for classpath scanning for annotated data types."); + return; + } + } catch (Exception e) { + throw new TypeConverterLoaderException( + "Cannot find package names to be used for classpath scanning for annotated data types.", e); + } + + // if there is any packages to scan and load @DataType classes, then do it + if (LOG.isTraceEnabled()) { + LOG.trace("Found data type packages to scan: {}", String.join(", ", packages)); + } + Set> scannedClasses = resolver.findAnnotated(DataType.class, packages.toArray(new String[]{})); + if (!scannedClasses.isEmpty()) { + LOG.debug("Found {} packages with {} @DataType classes to load", packages.size(), scannedClasses.size()); + + // load all the found classes into the type data type registry + for (Class type : scannedClasses) { + if (acceptClass(type)) { + if (LOG.isTraceEnabled()) { + LOG.trace("Loading data type annotation: {}", ObjectHelper.name(type)); + } + loadDataType(registry, type); + } + } + } + + // now clear 
the maps so we do not hold references + visitedClasses.clear(); + visitedURIs.clear(); + } + + private void loadDataType(DataTypeRegistry registry, Class type) { + if (visitedClasses.contains(type)) { + return; + } + visitedClasses.add(type); + + try { + if (DataTypeConverter.class.isAssignableFrom(type) && type.isAnnotationPresent(DataType.class)) { + DataType dt = type.getAnnotation(DataType.class); + DataTypeConverter converter = (DataTypeConverter) injector.newInstance(type); + registry.addDataTypeConverter(dt.scheme(), converter); + } + } catch (NoClassDefFoundError e) { + LOG.debug("Ignoring converter type: {} as a dependent class could not be found: {}", + type.getCanonicalName(), e, e); + } + } + + protected boolean acceptClass(Class type) { + return true; + } + + protected void findPackages(Set packages, ClassLoader classLoader) throws IOException { + Enumeration resources = classLoader.getResources(META_INF_SERVICES); + while (resources.hasMoreElements()) { + URL url = resources.nextElement(); + String path = url.getPath(); + if (!visitedURIs.contains(path)) { + // remember we have visited this uri so we wont read it twice + visitedURIs.add(path); + LOG.debug("Loading file {} to retrieve list of packages, from url: {}", META_INF_SERVICES, url); + try (BufferedReader reader = IOHelper.buffered(new InputStreamReader(url.openStream(), StandardCharsets.UTF_8))) { + while (true) { + String line = reader.readLine(); + if (line == null) { + break; + } + line = line.trim(); + if (line.startsWith("#") || line.length() == 0) { + continue; + } + packages.add(line); + } + } + } + } + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java new file mode 100644 index 000000000..859269fe4 --- /dev/null +++ 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format; + +import org.apache.camel.BeanInject; +import org.apache.camel.CamelContext; +import org.apache.camel.CamelContextAware; +import org.apache.camel.Exchange; +import org.apache.camel.Processor; + +/** + * Processor applies data type conversion based on given format name. Searches for matching data type converter + * with given component scheme and format name. 
+ */ +public class DataTypeProcessor implements Processor, CamelContextAware { + + private CamelContext camelContext; + + @BeanInject + private DefaultDataTypeRegistry dataTypeRegistry; + + private String scheme; + private String format; + + @Override + public void process(Exchange exchange) throws Exception { + if (format == null || format.isEmpty()) { + return; + } + + dataTypeRegistry.lookup(scheme, format) + .ifPresent(converter -> converter.convert(exchange)); + } + + public void setFormat(String format) { + this.format = format; + } + + public void setScheme(String scheme) { + this.scheme = scheme; + } + + @Override + public CamelContext getCamelContext() { + return camelContext; + } + + @Override + public void setCamelContext(CamelContext camelContext) { + this.camelContext = camelContext; + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java new file mode 100644 index 000000000..11680b50b --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format; + +import org.apache.camel.Exchange; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; + +/** + * Default data type converter receives a name and a target type in order to use traditional exchange body conversion + * mechanisms in order to transform the message body to a given type. + */ +public class DefaultDataTypeConverter implements DataTypeConverter { + + private final String name; + private final Class type; + + public DefaultDataTypeConverter(String name, Class type) { + this.name = name; + this.type = type; + } + + @Override + public void convert(Exchange exchange) { + if (type.isInstance(exchange.getMessage().getBody())) { + return; + } + + exchange.getMessage().setBody(exchange.getMessage().getBody(type)); + } + + @Override + public String getName() { + return name; + } + + public Class getType() { + return type; + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java new file mode 100644 index 000000000..e7c6e3e87 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.camel.CamelContext; +import org.apache.camel.CamelContextAware; +import org.apache.camel.ExtendedCamelContext; +import org.apache.camel.RuntimeCamelException; +import org.apache.camel.impl.engine.DefaultInjector; +import org.apache.camel.impl.engine.DefaultPackageScanClassResolver; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; +import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; +import org.apache.camel.spi.PackageScanClassResolver; +import org.apache.camel.support.service.ServiceSupport; + +/** + * Default data type registry able to resolve data types converters in the project. Data types may be defined at the component level + * via {@link org.apache.camel.kamelets.utils.format.spi.annotations.DataType} annotations. Also, users can add data types directly + * to the Camel context or manually to the registry. + * + * The registry is able to retrieve converters for a given data type based on the component scheme and the given data type name. 
+ */ +public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeRegistry, CamelContextAware { + + private CamelContext camelContext; + + private PackageScanClassResolver resolver; + + protected final List dataTypeLoaders = new ArrayList<>(); + + private final Map> dataTypeConverters = new HashMap<>(); + + @Override + public void addDataTypeConverter(String scheme, DataTypeConverter converter) { + this.getComponentDataTypeConverters(scheme).add(converter); + } + + @Override + public Optional lookup(String scheme, String name) { + if (dataTypeLoaders.isEmpty()) { + try { + doInit(); + } catch (Exception e) { + throw new RuntimeCamelException("Failed to initialize data type registry", e); + } + } + + if (name == null) { + return Optional.empty(); + } + + Optional componentDataTypeConverter = getComponentDataTypeConverters(scheme).stream() + .filter(dtc -> name.equals(dtc.getName())) + .findFirst(); + + if (componentDataTypeConverter.isPresent()) { + return componentDataTypeConverter; + } + + return getDefaultDataTypeConverter(name); + } + + @Override + protected void doInit() throws Exception { + super.doInit(); + + if (resolver == null) { + if (camelContext != null) { + resolver = camelContext.adapt(ExtendedCamelContext.class).getPackageScanClassResolver(); + } else { + resolver = new DefaultPackageScanClassResolver(); + } + } + + dataTypeLoaders.add(new AnnotationDataTypeLoader(new DefaultInjector(camelContext), resolver)); + + addDataTypeConverter(new DefaultDataTypeConverter("string", String.class)); + addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class)); + + for (DataTypeLoader loader : dataTypeLoaders) { + CamelContextAware.trySetCamelContext(loader, getCamelContext()); + loader.load(this); + } + } + + @Override + protected void doStop() throws Exception { + super.doStop(); + + this.dataTypeConverters.clear(); + } + + /** + * Retrieve default data output type from Camel context for given format name. 
+ * @param name + * @return + */ + private Optional getDefaultDataTypeConverter(String name) { + Optional dataTypeConverter = getComponentDataTypeConverters("camel").stream() + .filter(dtc -> name.equals(dtc.getName())) + .findFirst(); + + if (dataTypeConverter.isPresent()) { + return dataTypeConverter; + } + + return Optional.ofNullable(camelContext.getRegistry().lookupByNameAndType(name, DataTypeConverter.class)); + } + + /** + * Retrieve list of data types defined on the component level for given scheme. + * @param scheme + * @return + */ + private List getComponentDataTypeConverters(String scheme) { + if (!dataTypeConverters.containsKey(scheme)) { + dataTypeConverters.put(scheme, new ArrayList<>()); + } + + return dataTypeConverters.get(scheme); + } + + @Override + public CamelContext getCamelContext() { + return camelContext; + } + + @Override + public void setCamelContext(CamelContext camelContext) { + this.camelContext = camelContext; + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java similarity index 69% rename from library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java rename to library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java index c5098c1c6..a15ff3a08 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java @@ -14,22 +14,27 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.camel.kamelets.utils.transform.aws.ddb; +package org.apache.camel.kamelets.utils.format.converter.aws2.ddb; + +import java.io.InputStream; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; -import org.apache.camel.ExchangeProperty; -import org.apache.camel.InvalidPayloadException; import org.apache.camel.component.aws2.ddb.Ddb2Constants; import org.apache.camel.component.aws2.ddb.Ddb2Operations; +import org.apache.camel.component.jackson.JacksonDataFormat; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; import software.amazon.awssdk.services.dynamodb.model.AttributeAction; import software.amazon.awssdk.services.dynamodb.model.AttributeValue; import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate; @@ -40,55 +45,78 @@ * * Json property names map to attribute keys and Json property values map to attribute values. * - * During mapping the Json property types resolve to the respective attribute types ({@code String, StringSet, Boolean, Number, NumberSet, Map, Null}). - * Primitive typed arrays in Json get mapped to {@code StringSet} or {@code NumberSet} attribute values. + * During mapping the Json property types resolve to the respective attribute types + * ({@code String, StringSet, Boolean, Number, NumberSet, Map, Null}). Primitive typed arrays in Json get mapped to + * {@code StringSet} or {@code NumberSet} attribute values. + * + * The input type supports the operations: PutItem, UpdateItem, DeleteItem * * For PutItem operation the Json body defines all item attributes. 
* * For DeleteItem operation the Json body defines only the primary key attributes that identify the item to delete. * - For UpdateItem operation the Json body defines both key attributes to identify the item to be updated and all item attributes tht get updated on the item. + * For UpdateItem operation the Json body defines both key attributes to identify the item to be updated and all item + * attributes that get updated on the item. + * + * The given Json body can use "operation", "key" and "item" as top level properties. Both define a Json object that + * will be mapped to respective attribute value maps: * - * The given Json body can use "key" and "item" as top level properties. - * Both define a Json object that will be mapped to respective attribute value maps: - *
{@code
+ * 
+ * {@code
  * {
 + *   "operation": "PutItem",
  *   "key": {},
  *   "item": {}
  * }
  * }
  * 
- * The converter will extract the objects and set respective attribute value maps as header entries. - * This is a comfortable way to define different key and item attribute value maps e.g. on UpdateItem operation. * - * In case key and item attribute value maps are identical you can omit the special top level properties completely. - * The converter will map the whole Json body as is then and use it as source for the attribute value map. + * The converter will extract the objects and set respective attribute value maps as header entries. This is a + * comfortable way to define different key and item attribute value maps e.g. on UpdateItem operation. + * + * In case key and item attribute value maps are identical you can omit the special top level properties completely. The + * converter will map the whole Json body as is then and use it as source for the attribute value map. */ -public class JsonToDdbModelConverter { +@DataType(scheme = "aws2-ddb", name = "json") +public class Ddb2JsonInputType implements DataTypeConverter { + + private final JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), JsonNode.class); - public String process(@ExchangeProperty("operation") String operation, Exchange exchange) throws InvalidPayloadException { + @Override + public void convert(Exchange exchange) { if (exchange.getMessage().getHeaders().containsKey(Ddb2Constants.ITEM) || exchange.getMessage().getHeaders().containsKey(Ddb2Constants.KEY)) { - return ""; + return; } - ObjectMapper mapper = new ObjectMapper(); + JsonNode jsonBody = getBodyAsJsonNode(exchange); + + String operation + = Optional.ofNullable(jsonBody.get("operation")).map(JsonNode::asText).orElse(Ddb2Operations.PutItem.name()); + if (exchange.hasProperties() && exchange.getProperty("operation", String.class) != null) { + operation = exchange.getProperty("operation", String.class); + } - JsonNode jsonBody = exchange.getMessage().getMandatoryBody(JsonNode.class); + if 
(exchange.getIn().getHeaders().containsKey(Ddb2Constants.OPERATION)) { + operation = exchange.getIn().getHeader(Ddb2Constants.OPERATION, Ddb2Operations.class).name(); + } JsonNode key = jsonBody.get("key"); JsonNode item = jsonBody.get("item"); Map keyProps; if (key != null) { - keyProps = mapper.convertValue(key, new TypeReference>(){}); + keyProps = dataFormat.getObjectMapper().convertValue(key, new TypeReference>() { + }); } else { - keyProps = mapper.convertValue(jsonBody, new TypeReference>(){}); + keyProps = dataFormat.getObjectMapper().convertValue(jsonBody, new TypeReference>() { + }); } Map itemProps; if (item != null) { - itemProps = mapper.convertValue(item, new TypeReference>(){}); + itemProps = dataFormat.getObjectMapper().convertValue(item, new TypeReference>() { + }); } else { itemProps = keyProps; } @@ -115,8 +143,18 @@ public String process(@ExchangeProperty("operation") String operation, Exchange default: throw new UnsupportedOperationException(String.format("Unsupported operation '%s'", operation)); } + } - return ""; + private JsonNode getBodyAsJsonNode(Exchange exchange) { + try { + if (exchange.getMessage().getBody() instanceof JsonNode) { + return exchange.getMessage().getMandatoryBody(JsonNode.class); + } + + return (JsonNode) dataFormat.unmarshal(exchange, exchange.getMessage().getMandatoryBody(InputStream.class)); + } catch (Exception e) { + throw new CamelExecutionException("Failed to get mandatory Json node from message body", exchange, e); + } } private void setHeaderIfNotPresent(String headerName, Object value, Exchange exchange) { @@ -165,11 +203,12 @@ private static AttributeValue getAttributeValue(Object value) { } if (value instanceof int[]) { - return AttributeValue.builder().ns(Stream.of((int[]) value).map(Object::toString).collect(Collectors.toList())).build(); + return AttributeValue.builder().ns(Stream.of((int[]) value).map(Object::toString).collect(Collectors.toList())) + .build(); } if (value instanceof List) { - List values 
= ((List) value); + List values = (List) value; if (values.isEmpty()) { return AttributeValue.builder().ss().build(); diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java new file mode 100644 index 000000000..6065ebd10 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.converter.aws2.s3; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.camel.CamelExecutionException; +import org.apache.camel.Exchange; +import org.apache.camel.InvalidPayloadException; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; +import software.amazon.awssdk.utils.IoUtils; + +/** + * Binary output type. 
+ */ +@DataType(scheme = "aws2-s3", name = "binary") +public class AWS2S3BinaryOutputType implements DataTypeConverter { + + @Override + public void convert(Exchange exchange) { + if (exchange.getMessage().getBody() instanceof byte[]) { + return; + } + + try { + InputStream is = exchange.getMessage().getBody(InputStream.class); + if (is != null) { + exchange.getMessage().setBody(IoUtils.toByteArray(is)); + return; + } + + // Use default Camel converter utils to convert body to byte[] + exchange.getMessage().setBody(exchange.getMessage().getMandatoryBody(byte[].class)); + } catch (IOException | InvalidPayloadException e) { + throw new CamelExecutionException("Failed to convert AWS S3 body to byte[]", exchange, e); + } + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java new file mode 100644 index 000000000..74736d675 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.converter.aws2.s3; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.camel.CamelExecutionException; +import org.apache.camel.Exchange; +import org.apache.camel.component.aws2.s3.AWS2S3Constants; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.utils.IoUtils; + +/** + * Json output data type represents file name as key and file content as Json structure. + *

+ * Example Json structure: { "key": "myFile.txt", "content": "Hello", } + */ +@DataType(scheme = "aws2-s3", name = "json") +public class AWS2S3JsonOutputType implements DataTypeConverter { + + private static final String TEMPLATE = "{" + + "\"key\": \"%s\", " + + "\"content\": \"%s\"" + + "}"; + + @Override + public void convert(Exchange exchange) { + String key = exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class); + + ResponseInputStream bodyInputStream = exchange.getMessage().getBody(ResponseInputStream.class); + if (bodyInputStream != null) { + try { + exchange.getMessage().setBody(String.format(TEMPLATE, key, IoUtils.toUtf8String(bodyInputStream))); + return; + } catch (IOException e) { + throw new CamelExecutionException("Failed to convert AWS S3 body to Json", exchange, e); + } + } + + byte[] bodyContent = exchange.getMessage().getBody(byte[].class); + if (bodyContent != null) { + exchange.getMessage().setBody(String.format(TEMPLATE, key, new String(bodyContent, StandardCharsets.UTF_8))); + } + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java new file mode 100644 index 000000000..047e6dd51 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java @@ -0,0 +1,66 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.converter.standard; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.CamelExecutionException; +import org.apache.camel.Exchange; +import org.apache.camel.InvalidPayloadException; +import org.apache.camel.component.jackson.JacksonDataFormat; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + +/** + * Data type converter able to unmarshal to given unmarshalType using jackson data format. + *

+ * Unmarshal type should be given as a fully qualified class name in the exchange properties. + */ +@DataType(name = "jsonObject") +public class JsonModelDataType implements DataTypeConverter { + + public static final String JSON_DATA_TYPE_KEY = "CamelJsonModelDataType"; + + @Override + public void convert(Exchange exchange) { + if (!exchange.hasProperties() || !exchange.getProperties().containsKey(JSON_DATA_TYPE_KEY)) { + return; + } + + String type = exchange.getProperty(JSON_DATA_TYPE_KEY, String.class); + try (JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), Class.forName(type))) { + Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange)); + exchange.getMessage().setBody(unmarshalled); + } catch (Exception e) { + throw new CamelExecutionException( + String.format("Failed to load Json unmarshalling type '%s'", type), exchange, e); + } + } + + private InputStream getBodyAsStream(Exchange exchange) throws InvalidPayloadException { + InputStream bodyStream = exchange.getMessage().getBody(InputStream.class); + + if (bodyStream == null) { + bodyStream = new ByteArrayInputStream(exchange.getMessage().getMandatoryBody(byte[].class)); + } + + return bodyStream; + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java new file mode 100644 index 000000000..d39d30f80 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.spi; + +import org.apache.camel.Exchange; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + +@FunctionalInterface +public interface DataTypeConverter { + + void convert(Exchange exchange); + + /** + * Gets the data type converter name. Automatically derives the name from given type annotation. + * @return + */ + default String getName() { + if (this.getClass().isAnnotationPresent(DataType.class)) { + return this.getClass().getAnnotation(DataType.class).name(); + } + + throw new UnsupportedOperationException("Missing data type converter name"); + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java new file mode 100644 index 000000000..73f87c696 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.spi; + +/** + * A pluggable strategy to load data types into a {@link DataTypeRegistry}. + */ +public interface DataTypeLoader { + + /** + * A pluggable strategy to load data types into a registry. + * + * @param registry the registry to load the data types into + */ + void load(DataTypeRegistry registry); +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java new file mode 100644 index 000000000..cb2bedc91 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.spi; + +import java.util.Optional; + +/** + * Registry for data types. Data type loaders should be used to add types to the registry. + *

+ * The registry is able to perform a lookup of a specific data type. + */ +public interface DataTypeRegistry { + + /** + * Registers a new default data type converter. + * @param scheme + * @param converter + */ + void addDataTypeConverter(String scheme, DataTypeConverter converter); + + /** + * Registers a new default data type converter. + * @param converter + */ + default void addDataTypeConverter(DataTypeConverter converter) { + addDataTypeConverter("camel", converter); + } + + /** + * Find data type for given component scheme and data type name. + * @param scheme + * @param name + * @return + */ + Optional lookup(String scheme, String name); + + /** + * Find data type for given data type name. + * @param name + * @return + */ + default Optional lookup(String name) { + return lookup("camel", name); + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java new file mode 100644 index 000000000..b1d4f5a9c --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.spi.annotations; + +import java.lang.annotation.Documented; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Data type annotation defines a type with its component scheme, a name and input/output types. + */ +@Retention(RetentionPolicy.RUNTIME) +@Documented +@Target({ ElementType.TYPE }) +public @interface DataType { + + /** + * Camel component scheme. + * @return + */ + String scheme() default "camel"; + + /** + * Data type name. + * @return + */ + String name(); + + /** + * The media type associated with this data type. + * @return + */ + String mediaType() default ""; +} diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType new file mode 100644 index 000000000..b51d34040 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +org.apache.camel.kamelets.utils.format.converter.standard +org.apache.camel.kamelets.utils.format.converter.aws2.ddb +org.apache.camel.kamelets.utils.format.converter.aws2.s3 \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java new file mode 100644 index 000000000..2ee4113e3 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.camel.kamelets.utils.format; + +import java.util.Optional; + +import org.apache.camel.CamelContextAware; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class DefaultDataTypeRegistryTest { + + private DefaultCamelContext camelContext; + + private DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + + @BeforeEach + void setup() { + this.camelContext = new DefaultCamelContext(); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + } + + @Test + public void shouldLookupDefaultDataTypeConverters() throws Exception { + Optional converter = dataTypeRegistry.lookup( "jsonObject"); + Assertions.assertTrue(converter.isPresent()); + Assertions.assertEquals(JsonModelDataType.class, converter.get().getClass()); + converter = dataTypeRegistry.lookup( "string"); + Assertions.assertTrue(converter.isPresent()); + Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass()); + Assertions.assertEquals(String.class, ((DefaultDataTypeConverter) converter.get()).getType()); + converter = dataTypeRegistry.lookup( "binary"); + Assertions.assertTrue(converter.isPresent()); + Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass()); + Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType()); + } + +} \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java similarity index 65% rename from 
library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java rename to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java index 33d27bfe4..7f1f9e9fc 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputTypeTest.java @@ -14,16 +14,21 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.camel.kamelets.utils.transform.aws.ddb; + +package org.apache.camel.kamelets.utils.format.converter.aws2.ddb; import java.util.Map; +import java.util.Optional; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.CamelContextAware; +import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; -import org.apache.camel.InvalidPayloadException; import org.apache.camel.component.aws2.ddb.Ddb2Constants; import org.apache.camel.component.aws2.ddb.Ddb2Operations; import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.support.DefaultExchange; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; @@ -33,25 +38,25 @@ import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate; import software.amazon.awssdk.services.dynamodb.model.ReturnValue; -class JsonToDdbModelConverterTest { +public class Ddb2JsonInputTypeTest { private DefaultCamelContext camelContext; private final ObjectMapper mapper = new ObjectMapper(); - private final JsonToDdbModelConverter processor = new JsonToDdbModelConverter(); + private final Ddb2JsonInputType 
inputType = new Ddb2JsonInputType(); private final String keyJson = "{" + - "\"name\": \"Rajesh Koothrappali\"" + + "\"name\": \"Rajesh Koothrappali\"" + "}"; private final String itemJson = "{" + - "\"name\": \"Rajesh Koothrappali\"," + - "\"age\": 29," + - "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," + - "\"issues\": [5, 3, 9, 1]," + - "\"girlfriend\": null," + - "\"doctorate\": true" + + "\"name\": \"Rajesh Koothrappali\"," + + "\"age\": 29," + + "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," + + "\"issues\": [5, 3, 9, 1]," + + "\"girlfriend\": null," + + "\"doctorate\": true" + "}"; @BeforeEach @@ -65,8 +70,8 @@ void shouldMapPutItemHeaders() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(mapper.readTree(itemJson)); - - processor.process(Ddb2Operations.PutItem.name(), exchange); + exchange.setProperty("operation", Ddb2Operations.PutItem.name()); + inputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); @@ -80,9 +85,10 @@ void shouldMapPutItemHeaders() throws Exception { void shouldMapUpdateItemHeaders() throws Exception { Exchange exchange = new DefaultExchange(camelContext); - exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + ", \"item\": " + itemJson + "}")); + exchange.getMessage().setBody(mapper.readTree("{\"operation\": \"" + Ddb2Operations.UpdateItem.name() + "\", \"key\": " + + keyJson + ", \"item\": " + itemJson + "}")); - processor.process(Ddb2Operations.UpdateItem.name(), exchange); + inputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.UpdateItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); @@ -101,8 +107,9 @@ void shouldMapDeleteItemHeaders() throws Exception { Exchange exchange = new 
DefaultExchange(camelContext); exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + "}")); + exchange.setProperty("operation", Ddb2Operations.DeleteItem.name()); - processor.process(Ddb2Operations.DeleteItem.name(), exchange); + inputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.DeleteItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); @@ -119,8 +126,8 @@ void shouldMapNestedObjects() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(mapper.readTree("{\"user\":" + itemJson + "}")); - - processor.process(Ddb2Operations.PutItem.name(), exchange); + exchange.setProperty("operation", Ddb2Operations.PutItem.name()); + inputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); @@ -130,11 +137,12 @@ void shouldMapNestedObjects() throws Exception { Assertions.assertEquals(1L, attributeValueMap.size()); Assertions.assertEquals("AttributeValue(M={name=AttributeValue(S=Rajesh Koothrappali), " + - "age=AttributeValue(N=29), " + - "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " + - "issues=AttributeValue(NS=[5, 3, 9, 1]), " + - "girlfriend=AttributeValue(NUL=true), " + - "doctorate=AttributeValue(BOOL=true)})", attributeValueMap.get("user").toString()); + "age=AttributeValue(N=29), " + + "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " + + "issues=AttributeValue(NS=[5, 3, 9, 1]), " + + "girlfriend=AttributeValue(NUL=true), " + + "doctorate=AttributeValue(BOOL=true)})", + attributeValueMap.get("user").toString()); } @Test @@ -142,9 +150,10 @@ void shouldMapNestedObjects() throws Exception { void shouldMapEmptyJson() throws Exception { Exchange exchange = new DefaultExchange(camelContext); - exchange.getMessage().setBody(mapper.readTree("{}")); + 
exchange.getMessage().setBody("{}"); + exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.PutItem.name()); - processor.process(Ddb2Operations.PutItem.name(), exchange); + inputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); @@ -154,20 +163,39 @@ void shouldMapEmptyJson() throws Exception { Assertions.assertEquals(0L, attributeValueMap.size()); } - @Test() + @Test + void shouldFailForWrongBodyType() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody("Hello"); + + Assertions.assertThrows(CamelExecutionException.class, () -> inputType.convert(exchange)); + } + + @Test void shouldFailForUnsupportedOperation() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(mapper.readTree("{}")); + exchange.setProperty("operation", Ddb2Operations.BatchGetItems.name()); - Assertions.assertThrows(UnsupportedOperationException.class, () -> processor.process(Ddb2Operations.BatchGetItems.name(), exchange)); + Assertions.assertThrows(UnsupportedOperationException.class, () -> inputType.convert(exchange)); + } + + @Test + public void shouldLookupDataType() throws Exception { + DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + Optional converter = dataTypeRegistry.lookup("aws2-ddb", "json"); + Assertions.assertTrue(converter.isPresent()); } private void assertAttributeValueMap(Map attributeValueMap) { Assertions.assertEquals(6L, attributeValueMap.size()); Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); Assertions.assertEquals(AttributeValue.builder().n("29").build(), attributeValueMap.get("age")); - 
Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(), attributeValueMap.get("super-heroes")); + Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(), + attributeValueMap.get("super-heroes")); Assertions.assertEquals(AttributeValue.builder().ns("5", "3", "9", "1").build(), attributeValueMap.get("issues")); Assertions.assertEquals(AttributeValue.builder().nul(true).build(), attributeValueMap.get("girlfriend")); Assertions.assertEquals(AttributeValue.builder().bool(true).build(), attributeValueMap.get("doctorate")); @@ -175,11 +203,19 @@ private void assertAttributeValueMap(Map attributeValueM private void assertAttributeValueUpdateMap(Map attributeValueMap) { Assertions.assertEquals(6L, attributeValueMap.size()); - Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("name")); - Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("age")); - Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("super-heroes")); - Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("issues")); - Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend")); - Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate")); + 
Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build()) + .action(AttributeAction.PUT).build(), attributeValueMap.get("name")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build()) + .action(AttributeAction.PUT).build(), attributeValueMap.get("age")); + Assertions.assertEquals( + AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build()) + .action(AttributeAction.PUT).build(), + attributeValueMap.get("super-heroes")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build()) + .action(AttributeAction.PUT).build(), attributeValueMap.get("issues")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build()) + .action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build()) + .action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate")); } } diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java new file mode 100644 index 000000000..53357adde --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.kamelets.utils.format.converter.aws2.s3; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Optional; + +import org.apache.camel.CamelContextAware; +import org.apache.camel.Exchange; +import org.apache.camel.component.aws2.s3.AWS2S3Constants; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.support.DefaultExchange; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.http.AbortableInputStream; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class AWS2S3JsonOutputTypeTest { + + private final DefaultCamelContext camelContext = new DefaultCamelContext(); + + private final AWS2S3JsonOutputType outputType = new AWS2S3JsonOutputType(); + + @Test + void shouldMapFromStringToJsonModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt"); + exchange.getMessage().setBody("Test1"); + outputType.convert(exchange); + + 
Assertions.assertTrue(exchange.getMessage().hasHeaders()); + assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY)); + + assertJsonModelBody(exchange, "test1.txt", "Test1"); + } + + @Test + void shouldMapFromBytesToJsonModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt"); + exchange.getMessage().setBody("Test2".getBytes(StandardCharsets.UTF_8)); + outputType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + assertEquals("test2.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY)); + + assertJsonModelBody(exchange, "test2.txt", "Test2"); + } + + @Test + void shouldMapFromInputStreamToJsonModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt"); + exchange.getMessage().setBody(new ResponseInputStream<>(GetObjectRequest.builder().bucket("myBucket").key("test3.txt").build(), + AbortableInputStream.create(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8))))); + outputType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + assertEquals("test3.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY)); + + assertJsonModelBody(exchange, "test3.txt", "Test3"); + } + + @Test + public void shouldLookupDataType() throws Exception { + DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + Optional converter = dataTypeRegistry.lookup("aws2-s3", "json"); + Assertions.assertTrue(converter.isPresent()); + } + + private static void assertJsonModelBody(Exchange exchange, String key, String content) { + assertEquals(String.format("{\"key\": \"%s\", \"content\": \"%s\"}", key, content), exchange.getMessage().getBody()); + } +} diff --git 
a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java new file mode 100644 index 000000000..c175cc6d9 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.camel.kamelets.utils.format.converter.standard; + +import java.util.Optional; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.apache.camel.CamelContextAware; +import org.apache.camel.Exchange; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.support.DefaultExchange; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class JsonModelDataTypeTest { + + private final DefaultCamelContext camelContext = new DefaultCamelContext(); + + private final JsonModelDataType dataType = new JsonModelDataType(); + + @Test + void shouldMapFromStringToJsonModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.setProperty(JsonModelDataType.JSON_DATA_TYPE_KEY, Person.class.getName()); + exchange.getMessage().setBody("{ \"name\": \"Sheldon\", \"age\": 29}"); + dataType.convert(exchange); + + assertEquals(Person.class, exchange.getMessage().getBody().getClass()); + assertEquals("Sheldon", exchange.getMessage().getBody(Person.class).getName()); + } + + @Test + public void shouldLookupDataType() throws Exception { + DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + Optional converter = dataTypeRegistry.lookup("jsonObject"); + Assertions.assertTrue(converter.isPresent()); + } + + public static class Person { + @JsonProperty + private String name; + + @JsonProperty + private Long age; + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public Long getAge() { + return age; + } + + public void setAge(Long age) { + this.age = age; + } + } + +} \ No newline at end of file diff --git 
a/library/camel-kamelets-utils/src/test/resources/log4j2-test.xml b/library/camel-kamelets-utils/src/test/resources/log4j2-test.xml new file mode 100644 index 000000000..1d6d8f383 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/resources/log4j2-test.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml index 5b603abfc..ba2003478 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml @@ -97,6 +97,12 @@ spec: x-descriptors: - 'urn:alm:descriptor:com.tectonic.ui:checkbox' default: false + inputFormat: + title: Input Type + description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type. + type: string + default: json + example: json types: in: mediaType: application/json @@ -107,17 +113,24 @@ spec: - "camel:aws2-ddb" - "camel:kamelet" template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: inputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-ddb' + - key: format + value: '{{inputFormat}}' from: uri: "kamelet:source" steps: - set-property: - name: operation - constant: "{{operation}}" - - unmarshal: - json: - library: Jackson - unmarshalType: com.fasterxml.jackson.databind.JsonNode - - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter" + name: operation + constant: "{{operation}}" + - process: + ref: "{{inputTypeProcessor}}" - to: uri: "aws2-ddb:{{table}}" parameters: diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml 
b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml index 6ab2bca41..e09cf4aa2 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml @@ -107,6 +107,12 @@ spec: description: The number of milliseconds before the next poll of the selected bucket. type: integer default: 500 + outputFormat: + title: Output Type + description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type. + type: string + default: binary + example: binary dependencies: - "camel:core" - "camel:aws2-s3" @@ -114,6 +120,15 @@ spec: - "camel:kamelet" template: beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: outputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-s3' + - key: format + value: '{{outputFormat}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: @@ -143,4 +158,6 @@ spec: steps: - process: ref: "{{renameHeaders}}" + - process: + ref: "{{outputTypeProcessor}}" - to: "kamelet:sink" diff --git a/test/aws-s3/README.md b/test/aws-s3/README.md new file mode 100644 index 000000000..6e7d7315f --- /dev/null +++ b/test/aws-s3/README.md @@ -0,0 +1,76 @@ +# AWS S3 Kamelet test + +This test verifies the AWS S3 Kamelet source defined in [aws-s3-source.kamelet.yaml](aws-s3-source.kamelet.yaml) + +## Objectives + +The test verifies the AWS S3 Kamelet source by creating a Camel K integration that uses the Kamelet and listens for messages on the +AWS S3 bucket. + +The test uses a [LocalStack Testcontainers](https://www.testcontainers.org/modules/localstack/) instance to start a local AWS S3 service for mocking reasons. 
+The Kamelet and the test interact with the local AWS S3 service for validation of functionality. + +### Test Kamelet source + +The test performs the following high level steps for configs - URI, secret and property based: + +*Preparation* +- Start the AWS S3 service as LocalStack container +- Overwrite the Kamelet with the latest source +- Prepare the Camel AWS S3 client + +*Scenario* +- Create the Kamelet in the current namespace in the cluster +- Create the Camel K integration that uses the Kamelet source to consume data from AWS S3 service +- Wait for the Camel K integration to start and listen for AWS S3 messages +- Create a new message in the AWS S3 bucket +- Verify that the integration has received the message event + +*Cleanup* +- Stop the LocalStack container +- Delete the Camel K integration +- Delete the secret from the current namespace + +## Installation + +The test assumes that you have access to a Kubernetes cluster and that the Camel K operator as well as the YAKS operator is installed +and running. + +You can review the installation steps for the operators in the documentation: + +- [Install Camel K operator](https://camel.apache.org/camel-k/latest/installation/installation.html) +- [Install YAKS operator](https://github.com/citrusframework/yaks#installation) + +## Run the tests + +To run tests with URI based configuration: + +```shell script +$ yaks test aws-s3-source-uri-conf.feature +``` + +To run tests with secret based configuration: + +```shell script +$ yaks test aws-s3-source-secret-conf.feature +``` + +To run tests with property based configuration: + +```shell script +$ yaks test aws-s3-source-property-conf.feature +``` + +To run tests with URI binding: + +```shell script +$ yaks test aws-s3-uri-binding.feature +``` + +To run tests with binding to Knative channel: + +```shell script +$ yaks test aws-s3-inmem-binding.feature +``` + +You will be provided with the test log output and the test results.
diff --git a/test/aws-s3/amazonS3Client.groovy b/test/aws-s3/amazonS3Client.groovy new file mode 100644 index 000000000..5c3ff8a01 --- /dev/null +++ b/test/aws-s3/amazonS3Client.groovy @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider +import software.amazon.awssdk.regions.Region +import software.amazon.awssdk.services.s3.S3Client + +S3Client s3 = S3Client + .builder() + .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}")) + .credentialsProvider(StaticCredentialsProvider.create( + AwsBasicCredentials.create( + "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}", + "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}") + )) + .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}")) + .build() + +s3.createBucket(b -> b.bucket("${aws.s3.bucketNameOrArn}")) + +return s3 diff --git a/test/aws-s3/aws-s3-credentials.properties b/test/aws-s3/aws-s3-credentials.properties new file mode 100644 index 000000000..f9dd1e10b --- /dev/null +++ b/test/aws-s3/aws-s3-credentials.properties @@ -0,0 +1,7 @@ +# Please add your AWS S3 account credentials +camel.kamelet.aws-s3-source.aws-s3-credentials.bucketNameOrArn=${aws.s3.bucketNameOrArn} +camel.kamelet.aws-s3-source.aws-s3-credentials.overrideEndpoint=true +camel.kamelet.aws-s3-source.aws-s3-credentials.uriEndpointOverride=${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} +camel.kamelet.aws-s3-source.aws-s3-credentials.secretKey=${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} +camel.kamelet.aws-s3-source.aws-s3-credentials.accessKey=${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} +camel.kamelet.aws-s3-source.aws-s3-credentials.region=${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} diff --git a/test/aws-s3/aws-s3-inmem-binding.feature b/test/aws-s3/aws-s3-inmem-binding.feature new file mode 100644 index 000000000..d67e77984 --- /dev/null +++ b/test/aws-s3/aws-s3-inmem-binding.feature @@ -0,0 +1,49 @@ +@knative +Feature: AWS S3 Kamelet - binding to InMemoryChannel + + Background: + Given Kamelet aws-s3-source is available + Given variables + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + 
| aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-S3 client + Given New global Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create Knative broker and channel + Given create Knative broker default + And Knative broker default is running + Given create Knative channel messages + + Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding + Given variable loginfo is "Installed features" + Given load KameletBinding aws-s3-to-inmem.yaml + Given load KameletBinding inmem-to-log.yaml + Then KameletBinding aws-s3-to-inmem should be available + And KameletBinding inmem-to-log should be available + And Camel K integration aws-s3-to-inmem is running + And Camel K integration inmem-to-log is running + And Camel K integration aws-s3-to-inmem should print ${loginfo} + And Camel K integration inmem-to-log should print ${loginfo} + Then sleep 10000 ms + + Scenario: Verify Kamelet source + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then Camel K integration inmem-to-log should print ${aws.s3.message} + + Scenario: Remove resources + Given delete KameletBinding aws-s3-to-inmem + Given delete KameletBinding inmem-to-log + Given delete Knative broker default + Given delete Knative channel messages + + Scenario: Stop container + Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-source-property-conf.feature b/test/aws-s3/aws-s3-source-property-conf.feature new file mode 100644 index 000000000..93a2d3539 --- /dev/null +++ b/test/aws-s3/aws-s3-source-property-conf.feature @@ -0,0 +1,37 @@ +Feature: AWS S3 Kamelet - property based config + + Background: + Given Kamelet aws-s3-source is available + 
Given variables + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + | aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-S3 client + Given New global Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create AWS-S3 Kamelet to log binding + Given Camel K integration property file aws-s3-credentials.properties + Given create Camel K integration aws-s3-to-log-prop-based.groovy + """ + from("kamelet:aws-s3-source/aws-s3-credentials") + .to("log:info") + """ + Then Camel K integration aws-s3-to-log-prop-based should be running + + Scenario: Verify Kamelet source + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then Camel K integration aws-s3-to-log-prop-based should print ${aws.s3.message} + + Scenario: Remove Camel K resources + Given delete Camel K integration aws-s3-to-log-prop-based + + Scenario: Stop container + Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-source-secret-conf.feature b/test/aws-s3/aws-s3-source-secret-conf.feature new file mode 100644 index 000000000..78ee9be56 --- /dev/null +++ b/test/aws-s3/aws-s3-source-secret-conf.feature @@ -0,0 +1,39 @@ +@ignored +Feature: AWS S3 Kamelet - secret based config + + Background: + Given Kamelet aws-s3-source is available + Given variables + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + | aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-S3 client + Given New global 
Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create AWS-S3 Kamelet to log binding + Given create Kubernetes secret aws-s3-source-credentials + | aws-s3-credentials.properties | citrus:encodeBase64(citrus:readFile(aws-s3-credentials.properties)) | + Given create labels on Kubernetes secret aws-s3-source-credentials + | camel.apache.org/kamelet | aws-s3-source | + | camel.apache.org/kamelet.configuration | aws-s3-credentials | + Given load Camel K integration aws-s3-to-log-secret-based.groovy + Then Camel K integration aws-s3-to-log-secret-based should be running + + Scenario: Verify Kamelet source + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then Camel K integration aws-s3-to-log-secret-based should print ${aws.s3.message} + + Scenario: Remove resources + Given delete Camel K integration aws-s3-to-log-secret-based + Given delete Kubernetes secret aws-s3-source-credentials + + Scenario: Stop container + Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-source-uri-conf.feature b/test/aws-s3/aws-s3-source-uri-conf.feature new file mode 100644 index 000000000..ca65ba7dd --- /dev/null +++ b/test/aws-s3/aws-s3-source-uri-conf.feature @@ -0,0 +1,32 @@ +Feature: AWS S3 Kamelet - URI based config + + Background: + Given Kamelet aws-s3-source is available + Given variables + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + | aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create S3 client + Given New global Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create AWS-S3 Kamelet to log binding + Given load Camel K integration 
aws-s3-to-log-uri-based.groovy + Then Camel K integration aws-s3-to-log-uri-based should be running + + Scenario: Verify Kamelet source + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then Camel K integration aws-s3-to-log-uri-based should print ${aws.s3.message} + + Scenario: Remove Camel K resources + Given delete Camel K integration aws-s3-to-log-uri-based + + Scenario: Stop container + Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-to-inmem.yaml b/test/aws-s3/aws-s3-to-inmem.yaml new file mode 100644 index 000000000..ce880028d --- /dev/null +++ b/test/aws-s3/aws-s3-to-inmem.yaml @@ -0,0 +1,39 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-s3-to-inmem +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-s3-source + properties: + bucketNameOrArn: ${aws.s3.bucketNameOrArn} + overrideEndpoint: true + uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} + accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} + secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} + region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} + sink: + ref: + kind: InMemoryChannel + apiVersion: messaging.knative.dev/v1 + name: messages diff --git a/test/aws-s3/aws-s3-to-log-secret-based.groovy b/test/aws-s3/aws-s3-to-log-secret-based.groovy new file mode 100644 index 000000000..02fb1c58c --- /dev/null +++ b/test/aws-s3/aws-s3-to-log-secret-based.groovy @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// camel-k: language=groovy + +from("kamelet:aws-s3-source/aws-s3-credentials") + .to("log:info") diff --git a/test/aws-s3/aws-s3-to-log-uri-based.groovy b/test/aws-s3/aws-s3-to-log-uri-based.groovy new file mode 100644 index 000000000..145b5510e --- /dev/null +++ b/test/aws-s3/aws-s3-to-log-uri-based.groovy @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// camel-k: language=groovy + +def parameters = 'bucketNameOrArn=${aws.s3.bucketNameOrArn}&'+ + 'overrideEndpoint=true&' + + 'uriEndpointOverride=${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}&' + + 'accessKey=${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}&' + + 'secretKey=${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}&'+ + 'region=${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}&'+ + 'deleteAfterRead=true' + +from("kamelet:aws-s3-source?$parameters") + .to("log:info") diff --git a/test/aws-s3/aws-s3-uri-binding.feature b/test/aws-s3/aws-s3-uri-binding.feature new file mode 100644 index 000000000..ace191779 --- /dev/null +++ b/test/aws-s3/aws-s3-uri-binding.feature @@ -0,0 +1,35 @@ +Feature: AWS S3 Kamelet - binding to URI + + Background: + Given Kamelet aws-s3-source is available + Given variables + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + | aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-S3 client + Given New global Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create AWS-S3 Kamelet to log binding + Given variable loginfo is "Installed features" + When load KameletBinding aws-s3-uri-binding.yaml + And KameletBinding aws-s3-uri-binding is available + And Camel K integration aws-s3-uri-binding is running + Then Camel K integration aws-s3-uri-binding should print ${loginfo} + + Scenario: Verify Kamelet source + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then Camel K integration aws-s3-uri-binding should print ${aws.s3.message} + + Scenario: Remove Camel K resources + Given delete KameletBinding aws-s3-uri-binding + + Scenario: Stop container + Given stop 
LocalStack container diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml new file mode 100644 index 000000000..505228185 --- /dev/null +++ b/test/aws-s3/aws-s3-uri-binding.yaml @@ -0,0 +1,37 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-s3-uri-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-s3-source + properties: + bucketNameOrArn: ${aws.s3.bucketNameOrArn} + overrideEndpoint: true + outputFormat: json + uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} + accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} + secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} + region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} + sink: + uri: log:info diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml new file mode 100644 index 000000000..f36d136cd --- /dev/null +++ b/test/aws-s3/yaks-config.yaml @@ -0,0 +1,65 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +config: + namespace: + temporary: false + runtime: + testcontainers: + enabled: true + env: + - name: YAKS_CAMEL_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_CAMELK_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KAMELETS_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KUBERNETES_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KNATIVE_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_TESTCONTAINERS_AUTO_REMOVE_RESOURCES + value: false + - name: CITRUS_TYPE_CONVERTER + value: camel + resources: + - amazonS3Client.groovy + - aws-s3-credentials.properties + - aws-s3-to-log-uri-based.groovy + - aws-s3-to-log-secret-based.groovy + - aws-s3-uri-binding.yaml + - aws-s3-to-inmem.yaml + - ../utils/inmem-to-log.yaml + cucumber: + tags: + - "not @ignored" + settings: + dependencies: + - groupId: com.amazonaws + artifactId: aws-java-sdk-kinesis + version: "@aws-java-sdk.version@" + - groupId: org.apache.camel + artifactId: camel-aws2-s3 + version: "@camel.version@" + - groupId: org.apache.camel + artifactId: camel-jackson + version: "@camel.version@" + dump: + enabled: true + failedOnly: true + includes: + - app=camel-k diff --git a/test/utils/inmem-to-log.yaml b/test/utils/inmem-to-log.yaml new file mode 100644 index 000000000..8b5dc51e7 --- /dev/null +++ b/test/utils/inmem-to-log.yaml @@ -0,0 +1,29 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: inmem-to-log +spec: + source: + ref: + kind: InMemoryChannel + apiVersion: messaging.knative.dev/v1 + name: messages + sink: + uri: log:info From 4760cf66d253508f130ce11b81d068a92a07d5f2 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Thu, 17 Nov 2022 12:06:45 +0100 Subject: [PATCH 02/28] Refine Kamelet data type solution with review comments - Cache converter in DataTypeProcessor so lookup is only done once - Add lazy loading of component converters via resource path lookup (DataTypeConverterResolver) - Only load standard converters via annotation package scan --- kamelets/aws-ddb-sink.kamelet.yaml | 2 + kamelets/aws-s3-source.kamelet.yaml | 2 + .../format/AnnotationDataTypeLoader.java | 41 ++++++--- .../utils/format/DataTypeProcessor.java | 27 ++++-- .../DefaultDataTypeConverterResolver.java | 83 +++++++++++++++++++ .../utils/format/DefaultDataTypeRegistry.java | 64 +++++++------- .../format/spi/DataTypeConverterResolver.java | 49 +++++++++++ .../camel/{DataType => DataTypeConverter} | 4 +- .../camel/datatype/converter/aws2-ddb-json | 18 ++++ .../camel/datatype/converter/aws2-s3-binary | 18 ++++ .../camel/datatype/converter/aws2-s3-json | 18 ++++ .../DefaultDataTypeConverterResolverTest.java | 73 ++++++++++++++++ .../format/DefaultDataTypeRegistryTest.java | 7 +- .../camel/datatype/converter/camel-jsonObject | 18 ++++ .../apache/camel/datatype/converter/foo-json | 18 ++++ 
.../kamelets/aws-ddb-sink.kamelet.yaml | 2 + .../kamelets/aws-s3-source.kamelet.yaml | 2 + 17 files changed, 389 insertions(+), 57 deletions(-) create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java rename library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/{DataType => DataTypeConverter} (81%) create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java create mode 100644 library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject create mode 100644 library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml index ba2003478..a4e7a1144 100644 --- a/kamelets/aws-ddb-sink.kamelet.yaml +++ b/kamelets/aws-ddb-sink.kamelet.yaml @@ -123,6 +123,8 @@ spec: value: 'aws2-ddb' - key: format value: '{{inputFormat}}' + - key: registry + value: '{{dataTypeRegistry}}' from: uri: "kamelet:source" steps: diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml index e09cf4aa2..a63af7dc7 100644 --- a/kamelets/aws-s3-source.kamelet.yaml +++ b/kamelets/aws-s3-source.kamelet.yaml @@ -129,6 +129,8 @@ spec: value: 'aws2-s3' - key: format 
value: '{{outputFormat}}' + - key: registry + value: '{{dataTypeRegistry}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java index 96ca50eb9..9b37c3771 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/AnnotationDataTypeLoader.java @@ -26,12 +26,15 @@ import java.util.HashSet; import java.util.Set; +import org.apache.camel.CamelContext; +import org.apache.camel.CamelContextAware; +import org.apache.camel.ExtendedCamelContext; import org.apache.camel.TypeConverterLoaderException; +import org.apache.camel.impl.engine.DefaultPackageScanClassResolver; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; -import org.apache.camel.spi.Injector; import org.apache.camel.spi.PackageScanClassResolver; import org.apache.camel.util.IOHelper; import org.apache.camel.util.ObjectHelper; @@ -41,25 +44,31 @@ /** * Data type loader scans packages for {@link DataTypeConverter} classes annotated with {@link DataType} annotation. 
*/ -public class AnnotationDataTypeLoader implements DataTypeLoader { +public class AnnotationDataTypeLoader implements DataTypeLoader, CamelContextAware { - public static final String META_INF_SERVICES = "META-INF/services/org/apache/camel/DataType"; + public static final String META_INF_SERVICES = "META-INF/services/org/apache/camel/DataTypeConverter"; private static final Logger LOG = LoggerFactory.getLogger(AnnotationDataTypeLoader.class); - protected final PackageScanClassResolver resolver; - protected final Injector injector; + private CamelContext camelContext; + + protected PackageScanClassResolver resolver; protected Set> visitedClasses = new HashSet<>(); protected Set visitedURIs = new HashSet<>(); - public AnnotationDataTypeLoader(Injector injector, PackageScanClassResolver resolver) { - this.injector = injector; - this.resolver = resolver; - } - @Override public void load(DataTypeRegistry registry) { + ObjectHelper.notNull(camelContext, "camelContext"); + + if (resolver == null) { + if (camelContext instanceof ExtendedCamelContext) { + resolver = camelContext.adapt(ExtendedCamelContext.class).getPackageScanClassResolver(); + } else { + resolver = new DefaultPackageScanClassResolver(); + } + } + Set packages = new HashSet<>(); LOG.trace("Searching for {} services", META_INF_SERVICES); @@ -111,7 +120,7 @@ private void loadDataType(DataTypeRegistry registry, Class type) { try { if (DataTypeConverter.class.isAssignableFrom(type) && type.isAnnotationPresent(DataType.class)) { DataType dt = type.getAnnotation(DataType.class); - DataTypeConverter converter = (DataTypeConverter) injector.newInstance(type); + DataTypeConverter converter = (DataTypeConverter) camelContext.getInjector().newInstance(type); registry.addDataTypeConverter(dt.scheme(), converter); } } catch (NoClassDefFoundError e) { @@ -149,4 +158,14 @@ protected void findPackages(Set packages, ClassLoader classLoader) throw } } } + + @Override + public void setCamelContext(CamelContext camelContext) 
{ + this.camelContext = camelContext; + } + + @Override + public CamelContext getCamelContext() { + return camelContext; + } } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java index 859269fe4..81c583301 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java @@ -17,11 +17,13 @@ package org.apache.camel.kamelets.utils.format; -import org.apache.camel.BeanInject; +import java.util.Optional; + import org.apache.camel.CamelContext; import org.apache.camel.CamelContextAware; import org.apache.camel.Exchange; import org.apache.camel.Processor; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; /** * Processor applies data type conversion based on given format name. 
Searches for matching data type converter @@ -31,20 +33,31 @@ public class DataTypeProcessor implements Processor, CamelContextAware { private CamelContext camelContext; - @BeanInject - private DefaultDataTypeRegistry dataTypeRegistry; + private DefaultDataTypeRegistry registry; private String scheme; private String format; + private DataTypeConverter converter; + @Override public void process(Exchange exchange) throws Exception { if (format == null || format.isEmpty()) { return; } - dataTypeRegistry.lookup(scheme, format) - .ifPresent(converter -> converter.convert(exchange)); + doConverterLookup().ifPresent(converter -> converter.convert(exchange)); + } + + private Optional doConverterLookup() { + if (converter != null) { + return Optional.of(converter); + } + + Optional maybeConverter = registry.lookup(scheme, format); + maybeConverter.ifPresent(dataTypeConverter -> this.converter = dataTypeConverter); + + return maybeConverter; } public void setFormat(String format) { @@ -55,6 +68,10 @@ public void setScheme(String scheme) { this.scheme = scheme; } + public void setRegistry(DefaultDataTypeRegistry dataTypeRegistry) { + this.registry = dataTypeRegistry; + } + @Override public CamelContext getCamelContext() { return camelContext; diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java new file mode 100644 index 000000000..85444a284 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format; + +import java.util.Optional; + +import org.apache.camel.CamelContext; +import org.apache.camel.ExtendedCamelContext; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver; +import org.apache.camel.spi.FactoryFinder; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * The default implementation of {@link DataTypeConverterResolver} which tries to find components by using the URI scheme prefix + * and searching for a file of the URI scheme name in the META-INF/services/org/apache/camel/datatype/converter/ directory + * on the classpath. 
+ */ +public class DefaultDataTypeConverterResolver implements DataTypeConverterResolver { + + public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/datatype/converter/"; + + private static final Logger LOG = LoggerFactory.getLogger(DefaultDataTypeConverterResolver.class); + + private FactoryFinder factoryFinder; + + @Override + public Optional resolve(String scheme, String name, CamelContext context) { + String converterName = String.format("%s-%s", scheme, name); + Class type = findConverter(converterName, context); + if (type == null) { + // not found + return Optional.empty(); + } + + if (getLog().isDebugEnabled()) { + getLog().debug("Found data type converter: {} via type: {} via: {}{}", converterName, + type.getName(), factoryFinder.getResourcePath(), converterName); + } + + // create the converter instance + if (DataTypeConverter.class.isAssignableFrom(type)) { + try { + return Optional.of((DataTypeConverter) context.getInjector().newInstance(type)); + } catch (NoClassDefFoundError e) { + LOG.debug("Ignoring converter type: {} as a dependent class could not be found: {}", + type.getCanonicalName(), e, e); + } + } else { + throw new IllegalArgumentException("Type is not a DataTypeConverter implementation. 
Found: " + type.getName()); + } + + return Optional.empty(); + } + + private Class findConverter(String name, CamelContext context) { + if (factoryFinder == null) { + factoryFinder = context.adapt(ExtendedCamelContext.class).getFactoryFinder(RESOURCE_PATH); + } + return factoryFinder.findClass(name).orElse(null); + } + + protected Logger getLog() { + return LOG; + } + +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index e7c6e3e87..7105fb4cf 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -25,14 +25,11 @@ import org.apache.camel.CamelContext; import org.apache.camel.CamelContextAware; -import org.apache.camel.ExtendedCamelContext; import org.apache.camel.RuntimeCamelException; -import org.apache.camel.impl.engine.DefaultInjector; -import org.apache.camel.impl.engine.DefaultPackageScanClassResolver; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver; import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; -import org.apache.camel.spi.PackageScanClassResolver; import org.apache.camel.support.service.ServiceSupport; /** @@ -46,10 +43,10 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR private CamelContext camelContext; - private PackageScanClassResolver resolver; - protected final List dataTypeLoaders = new ArrayList<>(); + private DataTypeConverterResolver dataTypeConverterResolver; + private final Map> dataTypeConverters = new HashMap<>(); @Override @@ -71,30 +68,19 @@ public 
Optional lookup(String scheme, String name) { return Optional.empty(); } - Optional componentDataTypeConverter = getComponentDataTypeConverters(scheme).stream() - .filter(dtc -> name.equals(dtc.getName())) - .findFirst(); - - if (componentDataTypeConverter.isPresent()) { - return componentDataTypeConverter; + Optional dataTypeConverter = getDataTypeConverter(scheme, name); + if (!dataTypeConverter.isPresent()) { + dataTypeConverter = getDataTypeConverter("camel", name); } - return getDefaultDataTypeConverter(name); + return dataTypeConverter; } @Override protected void doInit() throws Exception { super.doInit(); - if (resolver == null) { - if (camelContext != null) { - resolver = camelContext.adapt(ExtendedCamelContext.class).getPackageScanClassResolver(); - } else { - resolver = new DefaultPackageScanClassResolver(); - } - } - - dataTypeLoaders.add(new AnnotationDataTypeLoader(new DefaultInjector(camelContext), resolver)); + dataTypeLoaders.add(new AnnotationDataTypeLoader()); addDataTypeConverter(new DefaultDataTypeConverter("string", String.class)); addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class)); @@ -113,20 +99,36 @@ protected void doStop() throws Exception { } /** - * Retrieve default data output type from Camel context for given format name. + * Retrieve data type converter for given scheme and format name. First checks for matching bean in Camel registry then + * tries to get from local cache or perform lazy lookup. 
+ * @param scheme * @param name * @return */ - private Optional getDefaultDataTypeConverter(String name) { - Optional dataTypeConverter = getComponentDataTypeConverters("camel").stream() + private Optional getDataTypeConverter(String scheme, String name) { + if (dataTypeConverterResolver == null) { + dataTypeConverterResolver = Optional.ofNullable(camelContext.getRegistry().findSingleByType(DataTypeConverterResolver.class)) + .orElseGet(DefaultDataTypeConverterResolver::new); + } + + // Looking for matching beans in Camel registry first + Optional dataTypeConverter = Optional.ofNullable(camelContext.getRegistry() + .lookupByNameAndType(String.format("%s-%s", scheme, name), DataTypeConverter.class)); + + if (!dataTypeConverter.isPresent()) { + // Try to retrieve converter from preloaded converters in local cache + dataTypeConverter = getComponentDataTypeConverters(scheme).stream() .filter(dtc -> name.equals(dtc.getName())) .findFirst(); + } - if (dataTypeConverter.isPresent()) { - return dataTypeConverter; + if (!dataTypeConverter.isPresent()) { + // Try to lazy load converter via resource path lookup + dataTypeConverter = dataTypeConverterResolver.resolve(scheme, name, camelContext); + dataTypeConverter.ifPresent(converter -> getComponentDataTypeConverters(scheme).add(converter)); } - return Optional.ofNullable(camelContext.getRegistry().lookupByNameAndType(name, DataTypeConverter.class)); + return dataTypeConverter; } /** @@ -135,11 +137,7 @@ private Optional getDefaultDataTypeConverter(String name) { * @return */ private List getComponentDataTypeConverters(String scheme) { - if (!dataTypeConverters.containsKey(scheme)) { - dataTypeConverters.put(scheme, new ArrayList<>()); - } - - return dataTypeConverters.get(scheme); + return dataTypeConverters.computeIfAbsent(scheme, (s) -> new ArrayList<>()); } @Override diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java new file mode 100644 index 000000000..17c48664f --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.spi; + +import java.util.Optional; + +import org.apache.camel.CamelContext; + +/** + * Represents a resolver of data type converters from a URI to be able to lazy load them using some discovery mechanism. + */ +@FunctionalInterface +public interface DataTypeConverterResolver { + + /** + * Attempts to resolve the converter for the given URI. + * + * @param scheme + * @param name + * @param camelContext + * @return + */ + Optional resolve(String scheme, String name, CamelContext camelContext); + + /** + * Attempts to resolve default converter for the given name. 
+ * @param name + * @param camelContext + * @return + */ + default Optional resolve(String name, CamelContext camelContext) { + return resolve("camel", name, camelContext); + } +} diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter similarity index 81% rename from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType rename to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter index b51d34040..adf4eb63f 100644 --- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataType +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter @@ -15,6 +15,4 @@ # limitations under the License. # -org.apache.camel.kamelets.utils.format.converter.standard -org.apache.camel.kamelets.utils.format.converter.aws2.ddb -org.apache.camel.kamelets.utils.format.converter.aws2.s3 \ No newline at end of file +org.apache.camel.kamelets.utils.format.converter.standard \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json new file mode 100644 index 000000000..f0194cc41 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-ddb-json @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +class=org.apache.camel.kamelets.utils.format.converter.aws2.ddb.Ddb2JsonInputType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary new file mode 100644 index 000000000..ba9c13f34 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3BinaryOutputType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json new file mode 100644 index 000000000..7a7c544f6 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3JsonOutputType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java new file mode 100644 index 000000000..1972b047b --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java @@ -0,0 +1,73 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.camel.kamelets.utils.format; + +import java.util.Optional; + +import org.apache.camel.Exchange; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class DefaultDataTypeConverterResolverTest { + + private DefaultCamelContext camelContext; + + private final DefaultDataTypeConverterResolver resolver = new DefaultDataTypeConverterResolver(); + + @BeforeEach + void setup() { + this.camelContext = new DefaultCamelContext(); + } + + @Test + public void shouldHandleUnresolvableDataTypeConverters() throws Exception { + Optional converter = resolver.resolve("unknown", camelContext); + Assertions.assertFalse(converter.isPresent()); + + converter = resolver.resolve("foo", "unknown", camelContext); + Assertions.assertFalse(converter.isPresent()); + } + + @Test + public void shouldResolveDataTypeConverters() throws Exception { + Optional converter = resolver.resolve("jsonObject", camelContext); + Assertions.assertTrue(converter.isPresent()); + Assertions.assertEquals(JsonModelDataType.class, converter.get().getClass()); + + converter = resolver.resolve("foo", "json", camelContext); + Assertions.assertTrue(converter.isPresent()); + Assertions.assertEquals(FooConverter.class, converter.get().getClass()); + } + + public static class FooConverter implements DataTypeConverter { + + @Override + public void convert(Exchange exchange) { + exchange.getMessage().setBody("Foo"); + } + + @Override + public String getName() { + return "foo"; + } + } +} \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java 
b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java index 2ee4113e3..e077b369d 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java @@ -29,14 +29,11 @@ class DefaultDataTypeRegistryTest { - private DefaultCamelContext camelContext; - - private DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + private final DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); @BeforeEach void setup() { - this.camelContext = new DefaultCamelContext(); - CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + CamelContextAware.trySetCamelContext(dataTypeRegistry, new DefaultCamelContext()); } @Test diff --git a/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject new file mode 100644 index 000000000..2f725f6aa --- /dev/null +++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +class=org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json new file mode 100644 index 000000000..ca7eaa022 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/foo-json @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +class=org.apache.camel.kamelets.utils.format.DefaultDataTypeConverterResolverTest$FooConverter \ No newline at end of file diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml index ba2003478..a4e7a1144 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml @@ -123,6 +123,8 @@ spec: value: 'aws2-ddb' - key: format value: '{{inputFormat}}' + - key: registry + value: '{{dataTypeRegistry}}' from: uri: "kamelet:source" steps: diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml index e09cf4aa2..a63af7dc7 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml @@ -129,6 +129,8 @@ spec: value: 'aws2-s3' - key: format value: '{{outputFormat}}' + - key: registry + value: '{{dataTypeRegistry}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: From 609eb4f8d37c1031e1d10599dadd9acf1fadf97d Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Thu, 17 Nov 2022 14:18:25 +0100 Subject: [PATCH 03/28] Fix Jitpack coordinates replacement and use KinD cluster v0.14.0 --- .github/workflows/yaks-tests.yaml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index defc5733b..e06d1751b 100644 --- a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -61,10 +61,10 @@ jobs: HEAD_REF: ${{ github.head_ref }} HEAD_REPO: ${{ github.event.pull_request.head.repo.full_name }} run: | - echo "Set JitPack dependency coordinates to 
${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF/\//'~'}-SNAPSHOT" + echo "Set JitPack dependency coordinates to ${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT" # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR - find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF/\//'~'}-SNAPSHOT/g" {} + + find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + - name: Get Camel K CLI run: | curl --fail -L --silent https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz @@ -83,6 +83,9 @@ jobs: rm -r _yaks - name: Kubernetes KinD Cluster uses: container-tools/kind-action@v1 + with: + version: v0.14.0 + node_image: kindest/node:v1.23.6@sha256:b1fa224cc6c7ff32455e0b1fd9cbfd3d3bc87ecaa8fcb06961ed1afb3db0f9ae - name: Info run: | kubectl version @@ -95,11 +98,7 @@ jobs: export KAMEL_INSTALL_REGISTRY=$KIND_REGISTRY export KAMEL_INSTALL_REGISTRY_INSECURE=true - kamel install -w - - # TODO replaces the below statement with --operator-env-vars KAMEL_INSTALL_DEFAULT_KAMELETS=false - # when we use camel k 1.8.0 - kubectl delete kamelets --all + kamel install -w --operator-env-vars KAMEL_INSTALL_DEFAULT_KAMELETS=false # Install the local kamelets find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec kubectl apply -f {} \; @@ -110,7 +109,7 @@ jobs: run: | echo "Running tests" yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS - yaks run test/aws-s3 $YAKS_RUN_OPTIONS + yaks run test/aws-s3/aws-s3-uri-binding.feature $YAKS_RUN_OPTIONS yaks run test/extract-field-action $YAKS_RUN_OPTIONS yaks run test/insert-field-action $YAKS_RUN_OPTIONS yaks 
run test/mail-sink $YAKS_RUN_OPTIONS From 239a37737d0362d1a9076e2a40e947eb1c637f62 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Thu, 17 Nov 2022 17:09:49 +0100 Subject: [PATCH 04/28] Add CloudEvent output type on AWS S3 Kamelet source --- kamelets/aws-ddb-sink.kamelet.yaml | 2 +- kamelets/aws-s3-source.kamelet.yaml | 2 +- library/camel-kamelets-utils/pom.xml | 5 + .../utils/format/DataTypeProcessor.java | 6 ++ .../aws2/s3/AWS2S3CloudEventOutputType.java | 62 ++++++++++++ .../converter/standard/JsonModelDataType.java | 6 +- .../datatype/converter/aws2-s3-cloudevents | 18 ++++ .../utils/format/DataTypeProcessorTest.java | 98 +++++++++++++++++++ .../DefaultDataTypeConverterResolverTest.java | 3 + .../format/DefaultDataTypeRegistryTest.java | 6 ++ .../s3/AWS2S3CloudEventOutputTypeTest.java | 68 +++++++++++++ .../standard/JsonModelDataTypeTest.java | 2 +- .../converter/test/UppercaseDataType.java | 31 ++++++ .../org/apache/camel/DataTypeConverter | 18 ++++ .../camel/datatype/converter/camel-lowercase | 18 ++++ .../kamelets/aws-ddb-sink.kamelet.yaml | 2 +- .../kamelets/aws-s3-source.kamelet.yaml | 2 +- test/aws-s3/aws-s3-uri-binding.yaml | 2 +- 18 files changed, 342 insertions(+), 9 deletions(-) create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java create mode 100644 
library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter create mode 100644 library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml index a4e7a1144..952ecfa17 100644 --- a/kamelets/aws-ddb-sink.kamelet.yaml +++ b/kamelets/aws-ddb-sink.kamelet.yaml @@ -124,7 +124,7 @@ spec: - key: format value: '{{inputFormat}}' - key: registry - value: '{{dataTypeRegistry}}' + value: '#bean:{{dataTypeRegistry}}' from: uri: "kamelet:source" steps: diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml index a63af7dc7..d937f6e5a 100644 --- a/kamelets/aws-s3-source.kamelet.yaml +++ b/kamelets/aws-s3-source.kamelet.yaml @@ -130,7 +130,7 @@ spec: - key: format value: '{{outputFormat}}' - key: registry - value: '{{dataTypeRegistry}}' + value: '#bean:{{dataTypeRegistry}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: diff --git a/library/camel-kamelets-utils/pom.xml b/library/camel-kamelets-utils/pom.xml index 5b1441f31..2aba210da 100644 --- a/library/camel-kamelets-utils/pom.xml +++ b/library/camel-kamelets-utils/pom.xml @@ -82,6 +82,11 @@ camel-aws2-s3 provided + + org.apache.camel + camel-cloudevents + + diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java index 81c583301..def0f2b8b 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java @@ -31,6 +31,8 @@ */ public class DataTypeProcessor implements Processor, CamelContextAware { + public static final String 
DATA_TYPE_FORMAT_PROPERTY = "CamelDataTypeFormat"; + private CamelContext camelContext; private DefaultDataTypeRegistry registry; @@ -42,6 +44,10 @@ public class DataTypeProcessor implements Processor, CamelContextAware { @Override public void process(Exchange exchange) throws Exception { + if (exchange.hasProperties() && exchange.getProperties().containsKey(DATA_TYPE_FORMAT_PROPERTY)) { + format = exchange.getProperty(DATA_TYPE_FORMAT_PROPERTY, String.class); + } + if (format == null || format.isEmpty()) { return; } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java new file mode 100644 index 000000000..655a4cef0 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.camel.kamelets.utils.format.converter.aws2.s3; + +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Map; + +import org.apache.camel.Exchange; +import org.apache.camel.component.aws2.s3.AWS2S3Constants; +import org.apache.camel.component.cloudevents.CloudEvent; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + +/** + * Output data type represents AWS S3 get object response as CloudEvent V1. The data type sets Camel specific + * CloudEvent headers on the exchange. + */ +@DataType(scheme = "aws2-s3", name = "cloudevents") +public class AWS2S3CloudEventOutputType implements DataTypeConverter { + + @Override + public void convert(Exchange exchange) { + final Map headers = exchange.getMessage().getHeaders(); + + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet:aws-s3-source"); + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class)); + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class)); + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange)); + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE, exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_TYPE, String.class)); + + String encoding = exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_ENCODING, String.class); + if (encoding != null) { + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_ENCODING, encoding); + } + + exchange.getMessage().removeHeaders("CamelAwsS3*"); + } + + private String getEventTime(Exchange exchange) { + final ZonedDateTime created + = ZonedDateTime.ofInstant(Instant.ofEpochMilli(exchange.getCreated()), ZoneId.systemDefault()); + return DateTimeFormatter.ISO_INSTANT.format(created); + } +} diff --git 
a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java index 047e6dd51..d8d4ca4e6 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java @@ -36,15 +36,15 @@ @DataType(name = "jsonObject") public class JsonModelDataType implements DataTypeConverter { - public static final String JSON_DATA_TYPE_KEY = "CamelJsonModelDataType"; + public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel"; @Override public void convert(Exchange exchange) { - if (!exchange.hasProperties() || !exchange.getProperties().containsKey(JSON_DATA_TYPE_KEY)) { + if (!exchange.hasProperties() || !exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) { return; } - String type = exchange.getProperty(JSON_DATA_TYPE_KEY, String.class); + String type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class); try (JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), Class.forName(type))) { Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange)); exchange.getMessage().setBody(unmarshalled); diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents new file mode 100644 index 000000000..fafdd926a --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-cloudevents @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# 
contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3CloudEventOutputType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java new file mode 100644 index 000000000..0140b6f93 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; + +import org.apache.camel.CamelContextAware; +import org.apache.camel.Exchange; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.support.DefaultExchange; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class DataTypeProcessorTest { + + private final DefaultCamelContext camelContext = new DefaultCamelContext(); + + private final DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + + private final DataTypeProcessor processor = new DataTypeProcessor(); + + @BeforeEach + void setup() { + CamelContextAware.trySetCamelContext(processor, camelContext); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + processor.setRegistry(dataTypeRegistry); + } + + @Test + public void shouldApplyDataTypeConverterFromAnnotationLookup() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8))); + processor.setFormat("uppercase"); + processor.process(exchange); + + assertEquals(String.class, exchange.getMessage().getBody().getClass()); + assertEquals("TEST", exchange.getMessage().getBody()); + } + + @Test + public void shouldApplyDataTypeConverterFromResourceLookup() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8))); + processor.setFormat("lowercase"); + processor.process(exchange); + + assertEquals(String.class, exchange.getMessage().getBody().getClass()); + 
assertEquals("test", exchange.getMessage().getBody()); + } + + @Test + public void shouldHandleUnknownDataType() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(new ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8))); + processor.setScheme("foo"); + processor.setFormat("unknown"); + processor.process(exchange); + + assertEquals(ByteArrayInputStream.class, exchange.getMessage().getBody().getClass()); + assertEquals("Test", exchange.getMessage().getBody(String.class)); + } + + public static class LowercaseDataType implements DataTypeConverter { + + @Override + public void convert(Exchange exchange) { + exchange.getMessage().setBody(exchange.getMessage().getBody(String.class).toLowerCase()); + } + + @Override + public String getName() { + return "lowercase"; + } + } + +} \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java index 1972b047b..b281f3143 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolverTest.java @@ -56,6 +56,9 @@ public void shouldResolveDataTypeConverters() throws Exception { converter = resolver.resolve("foo", "json", camelContext); Assertions.assertTrue(converter.isPresent()); Assertions.assertEquals(FooConverter.class, converter.get().getClass()); + + converter = resolver.resolve("camel", "lowercase", camelContext); + Assertions.assertTrue(converter.isPresent()); } public static class FooConverter implements DataTypeConverter { diff --git 
a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java index e077b369d..c72e7897a 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java @@ -22,6 +22,7 @@ import org.apache.camel.CamelContextAware; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType; +import org.apache.camel.kamelets.utils.format.converter.test.UppercaseDataType; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.BeforeEach; @@ -49,6 +50,11 @@ public void shouldLookupDefaultDataTypeConverters() throws Exception { Assertions.assertTrue(converter.isPresent()); Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass()); Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType()); + converter = dataTypeRegistry.lookup( "lowercase"); + Assertions.assertTrue(converter.isPresent()); + converter = dataTypeRegistry.lookup( "uppercase"); + Assertions.assertTrue(converter.isPresent()); + Assertions.assertEquals(UppercaseDataType.class, converter.get().getClass()); } } \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java new file mode 100644 index 000000000..10c517088 --- /dev/null +++ 
b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java @@ -0,0 +1,68 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.converter.aws2.s3; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Optional; + +import org.apache.camel.CamelContextAware; +import org.apache.camel.Exchange; +import org.apache.camel.component.aws2.s3.AWS2S3Constants; +import org.apache.camel.component.cloudevents.CloudEvents; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.support.DefaultExchange; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class AWS2S3CloudEventOutputTypeTest { + + private final DefaultCamelContext camelContext = new DefaultCamelContext(); + + private final AWS2S3CloudEventOutputType outputType = new AWS2S3CloudEventOutputType(); + + @Test + void shouldMapToCloudEvent() throws 
Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt"); + exchange.getMessage().setHeader(AWS2S3Constants.BUCKET_NAME, "myBucket"); + exchange.getMessage().setHeader(AWS2S3Constants.CONTENT_TYPE, "text/plain"); + exchange.getMessage().setHeader(AWS2S3Constants.CONTENT_ENCODING, StandardCharsets.UTF_8.toString()); + exchange.getMessage().setBody(new ByteArrayInputStream("Test1".getBytes(StandardCharsets.UTF_8))); + outputType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + Assertions.assertFalse(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY)); + assertEquals("kamelet:aws-s3-source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE)); + assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT)); + assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE)); + } + + @Test + public void shouldLookupDataType() throws Exception { + DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + Optional converter = dataTypeRegistry.lookup("aws2-s3", "cloudevents"); + Assertions.assertTrue(converter.isPresent()); + } +} \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java index c175cc6d9..d93da2348 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java @@ -41,7 +41,7 @@ public class 
JsonModelDataTypeTest { void shouldMapFromStringToJsonModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); - exchange.setProperty(JsonModelDataType.JSON_DATA_TYPE_KEY, Person.class.getName()); + exchange.setProperty(JsonModelDataType.DATA_TYPE_MODEL_PROPERTY, Person.class.getName()); exchange.getMessage().setBody("{ \"name\": \"Sheldon\", \"age\": 29}"); dataType.convert(exchange); diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java new file mode 100644 index 000000000..60604f736 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/test/UppercaseDataType.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.camel.kamelets.utils.format.converter.test; + +import org.apache.camel.Exchange; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + +@DataType(name = "uppercase") +public class UppercaseDataType implements DataTypeConverter { + + @Override + public void convert(Exchange exchange) { + exchange.getMessage().setBody(exchange.getMessage().getBody(String.class).toUpperCase()); + } +} diff --git a/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter new file mode 100644 index 000000000..bf3aaf0d2 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/DataTypeConverter @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +org.apache.camel.kamelets.utils.format.converter.test \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase new file mode 100644 index 000000000..b140a56b8 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/resources/META-INF/services/org/apache/camel/datatype/converter/camel-lowercase @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +class=org.apache.camel.kamelets.utils.format.DataTypeProcessorTest$LowercaseDataType \ No newline at end of file diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml index a4e7a1144..952ecfa17 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml @@ -124,7 +124,7 @@ spec: - key: format value: '{{inputFormat}}' - key: registry - value: '{{dataTypeRegistry}}' + value: '#bean:{{dataTypeRegistry}}' from: uri: "kamelet:source" steps: diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml index a63af7dc7..d937f6e5a 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml @@ -130,7 +130,7 @@ spec: - key: format value: '{{outputFormat}}' - key: registry - value: '{{dataTypeRegistry}}' + value: '#bean:{{dataTypeRegistry}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml index 505228185..14d420f93 100644 --- a/test/aws-s3/aws-s3-uri-binding.yaml +++ b/test/aws-s3/aws-s3-uri-binding.yaml @@ -28,7 +28,7 @@ spec: properties: bucketNameOrArn: ${aws.s3.bucketNameOrArn} overrideEndpoint: true - outputFormat: json + outputFormat: cloudevents uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} From 911ac238963e4a5a10d11445039800cba862997b Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 09:30:44 +0100 Subject: [PATCH 05/28] Use 
log-sink Kamelet and show headers --- test/aws-s3/aws-s3-uri-binding.yaml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml index 14d420f93..e21d54f4d 100644 --- a/test/aws-s3/aws-s3-uri-binding.yaml +++ b/test/aws-s3/aws-s3-uri-binding.yaml @@ -34,4 +34,9 @@ spec: secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} sink: - uri: log:info + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: log-sink + properties: + showHeaders: true From a8c22145cecbc66033ba4bed471e643714a98d85 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 10:48:51 +0100 Subject: [PATCH 06/28] Fail on missing data type and add log output --- .../utils/format/DataTypeProcessor.java | 22 +++++++++- .../DefaultDataTypeConverterResolver.java | 5 +++ .../utils/format/DefaultDataTypeRegistry.java | 44 ++++++++++++++++--- .../utils/format/DataTypeProcessorTest.java | 3 +- 4 files changed, 65 insertions(+), 9 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java index def0f2b8b..110c5cd44 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DataTypeProcessor.java @@ -21,9 +21,12 @@ import org.apache.camel.CamelContext; import org.apache.camel.CamelContextAware; +import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Processor applies data type conversion based on given format name. 
Searches for matching data type converter @@ -33,6 +36,8 @@ public class DataTypeProcessor implements Processor, CamelContextAware { public static final String DATA_TYPE_FORMAT_PROPERTY = "CamelDataTypeFormat"; + private static final Logger LOG = LoggerFactory.getLogger(DataTypeProcessor.class); + private CamelContext camelContext; private DefaultDataTypeRegistry registry; @@ -40,6 +45,8 @@ public class DataTypeProcessor implements Processor, CamelContextAware { private String scheme; private String format; + private boolean ignoreMissingDataType = false; + private DataTypeConverter converter; @Override @@ -52,7 +59,16 @@ public void process(Exchange exchange) throws Exception { return; } - doConverterLookup().ifPresent(converter -> converter.convert(exchange)); + Optional dataTypeConverter = doConverterLookup(); + dataTypeConverter.ifPresent(converter -> converter.convert(exchange)); + + if (!dataTypeConverter.isPresent()) { + LOG.debug("Unable to find data type for scheme {} and format name {}", scheme, format); + + if (!ignoreMissingDataType) { + throw new CamelExecutionException(String.format("Missing data type for scheme %s and format name %s", scheme, format), exchange); + } + } } private Optional doConverterLookup() { @@ -78,6 +94,10 @@ public void setRegistry(DefaultDataTypeRegistry dataTypeRegistry) { this.registry = dataTypeRegistry; } + public void setIgnoreMissingDataType(boolean ignoreMissingDataType) { + this.ignoreMissingDataType = ignoreMissingDataType; + } + @Override public CamelContext getCamelContext() { return camelContext; diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java index 85444a284..5fdaa790c 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java +++ 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java @@ -43,6 +43,11 @@ public class DefaultDataTypeConverterResolver implements DataTypeConverterResolv @Override public Optional resolve(String scheme, String name, CamelContext context) { String converterName = String.format("%s-%s", scheme, name); + + if (getLog().isDebugEnabled()) { + getLog().debug("Resolving data type converter {} via: {}{}", converterName, RESOURCE_PATH, converterName); + } + Class type = findConverter(converterName, context); if (type == null) { // not found diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index 7105fb4cf..0e262d286 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -31,6 +31,9 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; import org.apache.camel.support.service.ServiceSupport; +import org.apache.camel.util.ObjectHelper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Default data type registry able to resolve data types converters in the project. 
Data types may be defined at the component level @@ -41,6 +44,8 @@ */ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeRegistry, CamelContextAware { + private static final Logger LOG = LoggerFactory.getLogger(DefaultDataTypeRegistry.class); + private CamelContext camelContext; protected final List dataTypeLoaders = new ArrayList<>(); @@ -51,12 +56,21 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR @Override public void addDataTypeConverter(String scheme, DataTypeConverter converter) { + if (LOG.isDebugEnabled()) { + LOG.debug("Adding data type for scheme {} and name {}", scheme, converter.getName()); + } + this.getComponentDataTypeConverters(scheme).add(converter); } @Override public Optional lookup(String scheme, String name) { + if (LOG.isTraceEnabled()) { + LOG.trace("Searching for data type with scheme {} and name {}", scheme, name); + } + if (dataTypeLoaders.isEmpty()) { + LOG.trace("Lazy initializing data type registry"); try { doInit(); } catch (Exception e) { @@ -89,6 +103,8 @@ protected void doInit() throws Exception { CamelContextAware.trySetCamelContext(loader, getCamelContext()); loader.load(this); } + + LOG.debug("Loaded {} initial data type converters", dataTypeConverters.size()); } @Override @@ -115,17 +131,31 @@ private Optional getDataTypeConverter(String scheme, String n Optional dataTypeConverter = Optional.ofNullable(camelContext.getRegistry() .lookupByNameAndType(String.format("%s-%s", scheme, name), DataTypeConverter.class)); - if (!dataTypeConverter.isPresent()) { - // Try to retrieve converter from preloaded converters in local cache - dataTypeConverter = getComponentDataTypeConverters(scheme).stream() + if (dataTypeConverter.isPresent()) { + if (LOG.isDebugEnabled()) { + LOG.debug("Found data type {} for scheme {} and name {} in Camel registry", ObjectHelper.name(dataTypeConverter.get().getClass()), scheme, name); + } + return dataTypeConverter; + } + + // Try to retrieve 
converter from preloaded converters in local cache + dataTypeConverter = getComponentDataTypeConverters(scheme).stream() .filter(dtc -> name.equals(dtc.getName())) .findFirst(); + + if (dataTypeConverter.isPresent()) { + if (LOG.isDebugEnabled()) { + LOG.debug("Found data type {} for scheme {} and name {}", ObjectHelper.name(dataTypeConverter.get().getClass()), scheme, name); + } + return dataTypeConverter; } - if (!dataTypeConverter.isPresent()) { - // Try to lazy load converter via resource path lookup - dataTypeConverter = dataTypeConverterResolver.resolve(scheme, name, camelContext); - dataTypeConverter.ifPresent(converter -> getComponentDataTypeConverters(scheme).add(converter)); + // Try to lazy load converter via resource path lookup + dataTypeConverter = dataTypeConverterResolver.resolve(scheme, name, camelContext); + dataTypeConverter.ifPresent(converter -> getComponentDataTypeConverters(scheme).add(converter)); + + if (LOG.isDebugEnabled() && dataTypeConverter.isPresent()) { + LOG.debug("Resolved data type {} for scheme {} and name {} via resource path", ObjectHelper.name(dataTypeConverter.get().getClass()), scheme, name); } return dataTypeConverter; diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java index 0140b6f93..d2c2554a8 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DataTypeProcessorTest.java @@ -70,10 +70,11 @@ public void shouldApplyDataTypeConverterFromResourceLookup() throws Exception { } @Test - public void shouldHandleUnknownDataType() throws Exception { + public void shouldIgnoreUnknownDataType() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setBody(new 
ByteArrayInputStream("Test".getBytes(StandardCharsets.UTF_8))); + processor.setIgnoreMissingDataType(true); processor.setScheme("foo"); processor.setFormat("unknown"); processor.process(exchange); From 7a4253879173fa78cf62d95689ef8aff5024c0f3 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 15:17:34 +0100 Subject: [PATCH 07/28] Make sure data type resolver works on all runtimes --- .../DefaultDataTypeConverterResolver.java | 42 +++++-------------- .../utils/format/DefaultDataTypeRegistry.java | 7 ++-- 2 files changed, 15 insertions(+), 34 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java index 5fdaa790c..9d5e8b23f 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverterResolver.java @@ -23,7 +23,7 @@ import org.apache.camel.ExtendedCamelContext; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver; -import org.apache.camel.spi.FactoryFinder; +import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,51 +34,31 @@ */ public class DefaultDataTypeConverterResolver implements DataTypeConverterResolver { - public static final String RESOURCE_PATH = "META-INF/services/org/apache/camel/datatype/converter/"; + public static final String DATA_TYPE_CONVERTER_RESOURCE_PATH = "META-INF/services/org/apache/camel/datatype/converter/"; private static final Logger LOG = LoggerFactory.getLogger(DefaultDataTypeConverterResolver.class); - private FactoryFinder factoryFinder; - @Override public Optional resolve(String 
scheme, String name, CamelContext context) { String converterName = String.format("%s-%s", scheme, name); if (getLog().isDebugEnabled()) { - getLog().debug("Resolving data type converter {} via: {}{}", converterName, RESOURCE_PATH, converterName); - } - - Class type = findConverter(converterName, context); - if (type == null) { - // not found - return Optional.empty(); + getLog().debug("Resolving data type converter {} via: {}{}", converterName, DATA_TYPE_CONVERTER_RESOURCE_PATH, converterName); } - if (getLog().isDebugEnabled()) { + Optional converter = findConverter(converterName, context); + if (getLog().isDebugEnabled() && converter.isPresent()) { getLog().debug("Found data type converter: {} via type: {} via: {}{}", converterName, - type.getName(), factoryFinder.getResourcePath(), converterName); + ObjectHelper.name(converter.getClass()), DATA_TYPE_CONVERTER_RESOURCE_PATH, converterName); } - // create the converter instance - if (DataTypeConverter.class.isAssignableFrom(type)) { - try { - return Optional.of((DataTypeConverter) context.getInjector().newInstance(type)); - } catch (NoClassDefFoundError e) { - LOG.debug("Ignoring converter type: {} as a dependent class could not be found: {}", - type.getCanonicalName(), e, e); - } - } else { - throw new IllegalArgumentException("Type is not a DataTypeConverter implementation. 
Found: " + type.getName()); - } - - return Optional.empty(); + return converter; } - private Class findConverter(String name, CamelContext context) { - if (factoryFinder == null) { - factoryFinder = context.adapt(ExtendedCamelContext.class).getFactoryFinder(RESOURCE_PATH); - } - return factoryFinder.findClass(name).orElse(null); + private Optional findConverter(String name, CamelContext context) { + return context.adapt(ExtendedCamelContext.class) + .getBootstrapFactoryFinder(DATA_TYPE_CONVERTER_RESOURCE_PATH) + .newInstance(name, DataTypeConverter.class); } protected Logger getLog() { diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index 0e262d286..18effcc95 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -30,6 +30,7 @@ import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver; import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; +import org.apache.camel.support.CamelContextHelper; import org.apache.camel.support.service.ServiceSupport; import org.apache.camel.util.ObjectHelper; import org.slf4j.Logger; @@ -104,7 +105,7 @@ protected void doInit() throws Exception { loader.load(this); } - LOG.debug("Loaded {} initial data type converters", dataTypeConverters.size()); + LOG.debug("Loaded {} schemes holding {} data type converters", dataTypeConverters.size(), dataTypeConverters.values().stream().mapToInt(List::size).sum()); } @Override @@ -128,8 +129,8 @@ private Optional getDataTypeConverter(String scheme, String n } // Looking for matching beans in Camel registry first - Optional 
dataTypeConverter = Optional.ofNullable(camelContext.getRegistry() - .lookupByNameAndType(String.format("%s-%s", scheme, name), DataTypeConverter.class)); + Optional dataTypeConverter = Optional.ofNullable(CamelContextHelper.lookup(getCamelContext(), + String.format("%s-%s", scheme, name), DataTypeConverter.class)); if (dataTypeConverter.isPresent()) { if (LOG.isDebugEnabled()) { From 2795728e1a192f70cb6892d4215d478a1efad3cf Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 15:26:17 +0100 Subject: [PATCH 08/28] Load S3 converters via annotation scan --- .../META-INF/services/org/apache/camel/DataTypeConverter | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter index adf4eb63f..46b63db29 100644 --- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter @@ -15,4 +15,5 @@ # limitations under the License. 
# -org.apache.camel.kamelets.utils.format.converter.standard \ No newline at end of file +org.apache.camel.kamelets.utils.format.converter.standard +org.apache.camel.kamelets.utils.format.converter.aws2.s3 From 8420067285e16834be55007729b65fa723fee835 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 17:28:11 +0100 Subject: [PATCH 09/28] Preserve AWS S3 Key header as it is required during onCompletion --- .../format/converter/aws2/s3/AWS2S3CloudEventOutputType.java | 2 -- .../converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java index 655a4cef0..135790543 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java @@ -50,8 +50,6 @@ public void convert(Exchange exchange) { if (encoding != null) { headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_ENCODING, encoding); } - - exchange.getMessage().removeHeaders("CamelAwsS3*"); } private String getEventTime(Exchange exchange) { diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java index 10c517088..0a71f90dc 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java +++ 
b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java @@ -52,7 +52,7 @@ void shouldMapToCloudEvent() throws Exception { outputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); - Assertions.assertFalse(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY)); + Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY)); assertEquals("kamelet:aws-s3-source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE)); assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT)); assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE)); From 5f84f840b1a420b1e7ff605082fd14c2bed748ca Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 19:44:05 +0100 Subject: [PATCH 10/28] Remove AWS S3 Json output type Not a robust solution at the moment --- .../aws2/s3/AWS2S3JsonOutputType.java | 63 ------------------- .../camel/datatype/converter/aws2-s3-json | 18 ------ ...t.java => AWS2S3BinaryOutputTypeTest.java} | 31 +++++---- 3 files changed, 14 insertions(+), 98 deletions(-) delete mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java delete mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json rename library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/{AWS2S3JsonOutputTypeTest.java => AWS2S3BinaryOutputTypeTest.java} (76%) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java deleted file mode 100644 index 
74736d675..000000000 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputType.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.camel.kamelets.utils.format.converter.aws2.s3; - -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import org.apache.camel.CamelExecutionException; -import org.apache.camel.Exchange; -import org.apache.camel.component.aws2.s3.AWS2S3Constants; -import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; -import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; -import software.amazon.awssdk.core.ResponseInputStream; -import software.amazon.awssdk.utils.IoUtils; - -/** - * Json output data type represents file name as key and file content as Json structure. - *

- * Example Json structure: { "key": "myFile.txt", "content": "Hello", } - */ -@DataType(scheme = "aws2-s3", name = "json") -public class AWS2S3JsonOutputType implements DataTypeConverter { - - private static final String TEMPLATE = "{" + - "\"key\": \"%s\", " + - "\"content\": \"%s\"" + - "}"; - - @Override - public void convert(Exchange exchange) { - String key = exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class); - - ResponseInputStream bodyInputStream = exchange.getMessage().getBody(ResponseInputStream.class); - if (bodyInputStream != null) { - try { - exchange.getMessage().setBody(String.format(TEMPLATE, key, IoUtils.toUtf8String(bodyInputStream))); - return; - } catch (IOException e) { - throw new CamelExecutionException("Failed to convert AWS S3 body to Json", exchange, e); - } - } - - byte[] bodyContent = exchange.getMessage().getBody(byte[].class); - if (bodyContent != null) { - exchange.getMessage().setBody(String.format(TEMPLATE, key, new String(bodyContent, StandardCharsets.UTF_8))); - } - } -} diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json deleted file mode 100644 index 7a7c544f6..000000000 --- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-json +++ /dev/null @@ -1,18 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3JsonOutputType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java similarity index 76% rename from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java rename to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java index 53357adde..26b359f47 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3JsonOutputTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java @@ -35,14 +35,14 @@ import static org.junit.jupiter.api.Assertions.assertEquals; -public class AWS2S3JsonOutputTypeTest { +public class AWS2S3BinaryOutputTypeTest { private final DefaultCamelContext camelContext = new DefaultCamelContext(); - private final AWS2S3JsonOutputType outputType = new AWS2S3JsonOutputType(); + private final AWS2S3BinaryOutputType outputType = new AWS2S3BinaryOutputType(); @Test - void shouldMapFromStringToJsonModel() throws Exception { + void shouldMapFromStringToBytesModel() throws Exception { Exchange exchange = new 
DefaultExchange(camelContext); exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt"); @@ -50,13 +50,11 @@ void shouldMapFromStringToJsonModel() throws Exception { outputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); - assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY)); - - assertJsonModelBody(exchange, "test1.txt", "Test1"); + assertBinaryBody(exchange, "test1.txt", "Test1"); } @Test - void shouldMapFromBytesToJsonModel() throws Exception { + void shouldMapFromBytesToBytesModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt"); @@ -64,13 +62,11 @@ void shouldMapFromBytesToJsonModel() throws Exception { outputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); - assertEquals("test2.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY)); - - assertJsonModelBody(exchange, "test2.txt", "Test2"); + assertBinaryBody(exchange, "test2.txt", "Test2"); } @Test - void shouldMapFromInputStreamToJsonModel() throws Exception { + void shouldMapFromInputStreamToBytesModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt"); @@ -79,20 +75,21 @@ void shouldMapFromInputStreamToJsonModel() throws Exception { outputType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); - assertEquals("test3.txt", exchange.getMessage().getHeader(AWS2S3Constants.KEY)); - - assertJsonModelBody(exchange, "test3.txt", "Test3"); + assertBinaryBody(exchange, "test3.txt", "Test3"); } @Test public void shouldLookupDataType() throws Exception { DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); - Optional converter = dataTypeRegistry.lookup("aws2-s3", "json"); + Optional converter = 
dataTypeRegistry.lookup("aws2-s3", "binary"); Assertions.assertTrue(converter.isPresent()); } - private static void assertJsonModelBody(Exchange exchange, String key, String content) { - assertEquals(String.format("{\"key\": \"%s\", \"content\": \"%s\"}", key, content), exchange.getMessage().getBody()); + private static void assertBinaryBody(Exchange exchange, String key, String content) { + assertEquals(key, exchange.getMessage().getHeader(AWS2S3Constants.KEY)); + + assertEquals(byte[].class, exchange.getMessage().getBody().getClass()); + assertEquals(content, exchange.getMessage().getBody(String.class)); } } From e82a3c08c181fe64768660952c981c99c3e88cbc Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 18 Nov 2022 19:52:07 +0100 Subject: [PATCH 11/28] Load AWS DDB converters via annotation scan --- .../META-INF/services/org/apache/camel/DataTypeConverter | 1 + 1 file changed, 1 insertion(+) diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter index 46b63db29..81e102565 100644 --- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/DataTypeConverter @@ -17,3 +17,4 @@ org.apache.camel.kamelets.utils.format.converter.standard org.apache.camel.kamelets.utils.format.converter.aws2.s3 +org.apache.camel.kamelets.utils.format.converter.aws2.ddb From c94bee7d4dfe0426b153bc71c21a4f89342f1298 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Mon, 21 Nov 2022 16:32:33 +0100 Subject: [PATCH 12/28] Fix AWS DDB sink Kamelet --- kamelets/aws-ddb-sink.kamelet.yaml | 4 ++-- .../src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/kamelets/aws-ddb-sink.kamelet.yaml 
b/kamelets/aws-ddb-sink.kamelet.yaml index 952ecfa17..87b338ee4 100644 --- a/kamelets/aws-ddb-sink.kamelet.yaml +++ b/kamelets/aws-ddb-sink.kamelet.yaml @@ -129,8 +129,8 @@ spec: uri: "kamelet:source" steps: - set-property: - name: operation - constant: "{{operation}}" + name: operation + constant: "{{operation}}" - process: ref: "{{inputTypeProcessor}}" - to: diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml index 952ecfa17..87b338ee4 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml @@ -129,8 +129,8 @@ spec: uri: "kamelet:source" steps: - set-property: - name: operation - constant: "{{operation}}" + name: operation + constant: "{{operation}}" - process: ref: "{{inputTypeProcessor}}" - to: From d14ae284da1d1f60c192ffeaf16031f37ea9b3bc Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Mon, 21 Nov 2022 16:33:00 +0100 Subject: [PATCH 13/28] Enhance YAKS tests with AWS S3 data type test --- test/aws-s3/README.md | 2 +- test/aws-s3/aws-s3-data-type.feature | 48 +++++++++++++++++++ ...feature => aws-s3-knative-binding.feature} | 28 +++++------ ...3-to-inmem.yaml => aws-s3-to-knative.yaml} | 16 +++++-- test/aws-s3/aws-s3-uri-binding.yaml | 1 - test/aws-s3/yaks-config.yaml | 9 +++- ...{inmem-to-log.yaml => knative-to-log.yaml} | 17 +++++-- 7 files changed, 93 insertions(+), 28 deletions(-) create mode 100644 test/aws-s3/aws-s3-data-type.feature rename test/aws-s3/{aws-s3-inmem-binding.feature => aws-s3-knative-binding.feature} (58%) rename test/aws-s3/{aws-s3-to-inmem.yaml => aws-s3-to-knative.yaml} (83%) rename test/utils/{inmem-to-log.yaml => knative-to-log.yaml} (78%) diff --git a/test/aws-s3/README.md b/test/aws-s3/README.md index 6e7d7315f..e71f403f3 100644 --- a/test/aws-s3/README.md +++ b/test/aws-s3/README.md @@ -70,7 
+70,7 @@ $ yaks test aws-s3-uri-binding.feature To run tests with binding to Knative channel: ```shell script -$ yaks test aws-s3-inmem-binding.feature +$ yaks test aws-s3-knative-binding.feature ``` You will be provided with the test log output and the test results. diff --git a/test/aws-s3/aws-s3-data-type.feature b/test/aws-s3/aws-s3-data-type.feature new file mode 100644 index 000000000..3ec04bde7 --- /dev/null +++ b/test/aws-s3/aws-s3-data-type.feature @@ -0,0 +1,48 @@ +Feature: AWS S3 Kamelet - output data type + + Background: + Given Knative event consumer timeout is 20000 ms + Given Camel K resource polling configuration + | maxAttempts | 200 | + | delayBetweenAttempts | 4000 | + Given variables + | aws.s3.output | cloudevents | + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + | aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-S3 client + Given New global Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create AWS-S3 Kamelet to Knative binding + Given variable loginfo is "Installed features" + When load KameletBinding aws-s3-to-knative.yaml + And KameletBinding aws-s3-to-knative is available + And Camel K integration aws-s3-to-knative is running + Then Camel K integration aws-s3-to-knative should print ${loginfo} + + Scenario: Verify Kamelet source + Given create Knative event consumer service event-consumer-service + Given create Knative trigger event-service-trigger on service event-consumer-service with filter on attributes + | type | org.apache.camel.event | + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then expect Knative event data: 
${aws.s3.message} + And verify Knative event + | type | org.apache.camel.event | + | source | @ignore@ | + | subject | @ignore@ | + | id | @ignore@ | + + Scenario: Remove Camel K resources + Given delete KameletBinding aws-s3-to-knative + Given delete Kubernetes service event-consumer-service + + Scenario: Stop container + Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-inmem-binding.feature b/test/aws-s3/aws-s3-knative-binding.feature similarity index 58% rename from test/aws-s3/aws-s3-inmem-binding.feature rename to test/aws-s3/aws-s3-knative-binding.feature index d67e77984..c143bbeee 100644 --- a/test/aws-s3/aws-s3-inmem-binding.feature +++ b/test/aws-s3/aws-s3-knative-binding.feature @@ -1,5 +1,5 @@ @knative -Feature: AWS S3 Kamelet - binding to InMemoryChannel +Feature: AWS S3 Kamelet - binding to Knative Background: Given Kamelet aws-s3-source is available @@ -17,33 +17,31 @@ Feature: AWS S3 Kamelet - binding to InMemoryChannel Given New global Camel context Given load to Camel registry amazonS3Client.groovy - Scenario: Create Knative broker and channel + Scenario: Create Knative broker Given create Knative broker default And Knative broker default is running - Given create Knative channel messages Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding Given variable loginfo is "Installed features" - Given load KameletBinding aws-s3-to-inmem.yaml - Given load KameletBinding inmem-to-log.yaml - Then KameletBinding aws-s3-to-inmem should be available - And KameletBinding inmem-to-log should be available - And Camel K integration aws-s3-to-inmem is running - And Camel K integration inmem-to-log is running - And Camel K integration aws-s3-to-inmem should print ${loginfo} - And Camel K integration inmem-to-log should print ${loginfo} + Given load KameletBinding aws-s3-to-knative.yaml + Given load KameletBinding knative-to-log.yaml + Then KameletBinding aws-s3-to-knative should be available + And KameletBinding knative-to-log should be available 
+ And Camel K integration aws-s3-to-knative is running + And Camel K integration knative-to-log is running + And Camel K integration aws-s3-to-knative should print ${loginfo} + And Camel K integration knative-to-log should print ${loginfo} Then sleep 10000 ms Scenario: Verify Kamelet source Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} - Then Camel K integration inmem-to-log should print ${aws.s3.message} + Then Camel K integration knative-to-log should print ${aws.s3.message} Scenario: Remove resources - Given delete KameletBinding aws-s3-to-inmem - Given delete KameletBinding inmem-to-log + Given delete KameletBinding aws-s3-to-knative + Given delete KameletBinding knative-to-log Given delete Knative broker default - Given delete Knative channel messages Scenario: Stop container Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-to-inmem.yaml b/test/aws-s3/aws-s3-to-knative.yaml similarity index 83% rename from test/aws-s3/aws-s3-to-inmem.yaml rename to test/aws-s3/aws-s3-to-knative.yaml index ce880028d..e99ee20f1 100644 --- a/test/aws-s3/aws-s3-to-inmem.yaml +++ b/test/aws-s3/aws-s3-to-knative.yaml @@ -18,7 +18,7 @@ apiVersion: camel.apache.org/v1alpha1 kind: KameletBinding metadata: - name: aws-s3-to-inmem + name: aws-s3-to-knative spec: source: ref: @@ -28,12 +28,20 @@ spec: properties: bucketNameOrArn: ${aws.s3.bucketNameOrArn} overrideEndpoint: true + outputFormat: ${aws.s3.output} uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: log-sink + properties: + showHeaders: true sink: ref: - kind: InMemoryChannel - apiVersion: messaging.knative.dev/v1 - 
name: messages + kind: Broker + apiVersion: eventing.knative.dev/v1 + name: default diff --git a/test/aws-s3/aws-s3-uri-binding.yaml b/test/aws-s3/aws-s3-uri-binding.yaml index e21d54f4d..b3612219f 100644 --- a/test/aws-s3/aws-s3-uri-binding.yaml +++ b/test/aws-s3/aws-s3-uri-binding.yaml @@ -28,7 +28,6 @@ spec: properties: bucketNameOrArn: ${aws.s3.bucketNameOrArn} overrideEndpoint: true - outputFormat: cloudevents uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml index f36d136cd..0d70ba755 100644 --- a/test/aws-s3/yaks-config.yaml +++ b/test/aws-s3/yaks-config.yaml @@ -42,12 +42,17 @@ config: - aws-s3-to-log-uri-based.groovy - aws-s3-to-log-secret-based.groovy - aws-s3-uri-binding.yaml - - aws-s3-to-inmem.yaml - - ../utils/inmem-to-log.yaml + - aws-s3-to-knative.yaml + - ../utils/knative-to-log.yaml cucumber: tags: - "not @ignored" settings: + loggers: + - name: Logger.Message_IN + level: DEBUG + - name: Logger.Message_OUT + level: DEBUG dependencies: - groupId: com.amazonaws artifactId: aws-java-sdk-kinesis diff --git a/test/utils/inmem-to-log.yaml b/test/utils/knative-to-log.yaml similarity index 78% rename from test/utils/inmem-to-log.yaml rename to test/utils/knative-to-log.yaml index 8b5dc51e7..c03e6de29 100644 --- a/test/utils/inmem-to-log.yaml +++ b/test/utils/knative-to-log.yaml @@ -18,12 +18,19 @@ apiVersion: camel.apache.org/v1alpha1 kind: KameletBinding metadata: - name: inmem-to-log + name: knative-to-log spec: source: ref: - kind: InMemoryChannel - apiVersion: messaging.knative.dev/v1 - name: messages + kind: Broker + apiVersion: eventing.knative.dev/v1 + name: default + properties: + type: org.apache.camel.event sink: - uri: log:info + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: log-sink + properties: + showHeaders: true From 
495ddf24078e33dc43b5eb3981424ac58f47892d Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Mon, 21 Nov 2022 22:01:09 +0100 Subject: [PATCH 14/28] Fix cloud event type and do not set data content type Setting the data content type breaks the Camel Knative producer --- .../converter/aws2/s3/AWS2S3CloudEventOutputType.java | 8 +------- .../converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java | 2 +- 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java index 135790543..399e01111 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java @@ -40,16 +40,10 @@ public class AWS2S3CloudEventOutputType implements DataTypeConverter { public void convert(Exchange exchange) { final Map headers = exchange.getMessage().getHeaders(); - headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet:aws-s3-source"); + headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source"); headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class)); headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class)); headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange)); - headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_TYPE, exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_TYPE, String.class)); - - String encoding = exchange.getMessage().getHeader(AWS2S3Constants.CONTENT_ENCODING, String.class); - if (encoding != null) { - 
headers.put(CloudEvent.CAMEL_CLOUD_EVENT_DATA_CONTENT_ENCODING, encoding); - } } private String getEventTime(Exchange exchange) { diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java index 0a71f90dc..e139b2b9c 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java @@ -53,7 +53,7 @@ void shouldMapToCloudEvent() throws Exception { Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY)); - assertEquals("kamelet:aws-s3-source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE)); + assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE)); assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT)); assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE)); } From 4e28c942e0baa757b60f1e679de7f520f23d93b4 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Tue, 22 Nov 2022 12:35:45 +0100 Subject: [PATCH 15/28] Enhance data type AWS S3 YAKS tests --- test/aws-s3/aws-s3-cloudevents.feature | 48 +++++++++++++++++++ ...ta-type.feature => aws-s3-knative.feature} | 5 +- 2 files changed, 50 insertions(+), 3 deletions(-) create mode 100644 test/aws-s3/aws-s3-cloudevents.feature rename test/aws-s3/{aws-s3-data-type.feature => aws-s3-knative.feature} (94%) diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/test/aws-s3/aws-s3-cloudevents.feature new file mode 100644 index 
000000000..1e2f7d1ef --- /dev/null +++ b/test/aws-s3/aws-s3-cloudevents.feature @@ -0,0 +1,48 @@ +Feature: AWS S3 Kamelet - cloud events data type + + Background: + Given Knative event consumer timeout is 20000 ms + Given Camel K resource polling configuration + | maxAttempts | 200 | + | delayBetweenAttempts | 4000 | + Given variables + | aws.s3.output | cloudevents | + | aws.s3.bucketNameOrArn | mybucket | + | aws.s3.message | Hello from S3 Kamelet | + | aws.s3.key | hello.txt | + + Scenario: Start LocalStack container + Given Enable service S3 + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-S3 client + Given New global Camel context + Given load to Camel registry amazonS3Client.groovy + + Scenario: Create AWS-S3 Kamelet to Knative binding + Given variable loginfo is "Installed features" + When load KameletBinding aws-s3-to-knative.yaml + And KameletBinding aws-s3-to-knative is available + And Camel K integration aws-s3-to-knative is running + Then Camel K integration aws-s3-to-knative should print ${loginfo} + + Scenario: Verify Kamelet source + Given create Knative event consumer service event-consumer-service + Given create Knative trigger event-service-trigger on service event-consumer-service with filter on attributes + | type | kamelet.aws.s3.source | + Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" + Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} + Then expect Knative event data: ${aws.s3.message} + And verify Knative event + | type | kamelet.aws.s3.source | + | source | ${aws.s3.bucketNameOrArn} | + | subject | ${aws.s3.key} | + | id | @ignore@ | + + Scenario: Remove Camel K resources + Given delete KameletBinding aws-s3-to-knative + Given delete Kubernetes service event-consumer-service + + Scenario: Stop container + Given stop LocalStack container diff 
--git a/test/aws-s3/aws-s3-data-type.feature b/test/aws-s3/aws-s3-knative.feature similarity index 94% rename from test/aws-s3/aws-s3-data-type.feature rename to test/aws-s3/aws-s3-knative.feature index 3ec04bde7..148ec1d6f 100644 --- a/test/aws-s3/aws-s3-data-type.feature +++ b/test/aws-s3/aws-s3-knative.feature @@ -1,4 +1,4 @@ -Feature: AWS S3 Kamelet - output data type +Feature: AWS S3 Kamelet - Knative binding Background: Given Knative event consumer timeout is 20000 ms @@ -6,7 +6,7 @@ Feature: AWS S3 Kamelet - output data type | maxAttempts | 200 | | delayBetweenAttempts | 4000 | Given variables - | aws.s3.output | cloudevents | + | aws.s3.output | string | | aws.s3.bucketNameOrArn | mybucket | | aws.s3.message | Hello from S3 Kamelet | | aws.s3.key | hello.txt | @@ -37,7 +37,6 @@ Feature: AWS S3 Kamelet - output data type And verify Knative event | type | org.apache.camel.event | | source | @ignore@ | - | subject | @ignore@ | | id | @ignore@ | Scenario: Remove Camel K resources From 14cd8066d5a257b2a79e892c2aa0cbbbdf60cbbd Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Wed, 23 Nov 2022 21:56:04 +0100 Subject: [PATCH 16/28] Add option to disable data type registry classpath scan --- .../kamelets/utils/format/DefaultDataTypeRegistry.java | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index 18effcc95..d393e6c77 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -53,6 +53,8 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR private DataTypeConverterResolver dataTypeConverterResolver; 
+ private boolean classpathScan = true; + private final Map> dataTypeConverters = new HashMap<>(); @Override @@ -95,7 +97,9 @@ public Optional lookup(String scheme, String name) { protected void doInit() throws Exception { super.doInit(); - dataTypeLoaders.add(new AnnotationDataTypeLoader()); + if (classpathScan) { + dataTypeLoaders.add(new AnnotationDataTypeLoader()); + } addDataTypeConverter(new DefaultDataTypeConverter("string", String.class)); addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class)); @@ -171,6 +175,10 @@ private List getComponentDataTypeConverters(String scheme) { return dataTypeConverters.computeIfAbsent(scheme, (s) -> new ArrayList<>()); } + public void setClasspathScan(boolean classpathScan) { + this.classpathScan = classpathScan; + } + @Override public CamelContext getCamelContext() { return camelContext; From b67651e39be8853c52c48bd5d2871e4793fbdedb Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Thu, 24 Nov 2022 16:49:34 +0100 Subject: [PATCH 17/28] Set proper media types --- .../format/DefaultDataTypeConverter.java | 25 +++++++++++++++++- .../utils/format/DefaultDataTypeRegistry.java | 7 ++--- .../converter/aws2/ddb/Ddb2JsonInputType.java | 2 +- .../aws2/s3/AWS2S3BinaryOutputType.java | 2 +- .../aws2/s3/AWS2S3CloudEventOutputType.java | 2 +- .../converter/standard/JsonModelDataType.java | 2 +- .../utils/format/spi/DataTypeConverter.java | 26 ++++++++++++++++++- .../format/spi/annotations/DataType.java | 4 ++- 8 files changed, 60 insertions(+), 10 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java index 11680b50b..9f2c31ceb 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java +++ 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java @@ -19,6 +19,7 @@ import org.apache.camel.Exchange; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; /** * Default data type converter receives a name and a target type in order to use traditional exchange body conversion @@ -26,14 +27,26 @@ */ public class DefaultDataTypeConverter implements DataTypeConverter { + private final String scheme; private final String name; + private final String mediaType; private final Class type; - public DefaultDataTypeConverter(String name, Class type) { + public DefaultDataTypeConverter(String scheme, String name, String mediaType, Class type) { + this.scheme = scheme; this.name = name; + this.mediaType = mediaType; this.type = type; } + public DefaultDataTypeConverter(String scheme, String name, Class type) { + this(scheme, name, "", type); + } + + public DefaultDataTypeConverter(String name, Class type) { + this(DataType.DEFAULT_SCHEME, name, type); + } + @Override public void convert(Exchange exchange) { if (type.isInstance(exchange.getMessage().getBody())) { @@ -48,6 +61,16 @@ public String getName() { return name; } + @Override + public String getScheme() { + return scheme; + } + + @Override + public String getMediaType() { + return mediaType; + } + public Class getType() { return type; } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index d393e6c77..3d5b514e6 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -30,6 +30,7 @@ import 
org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver; import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; import org.apache.camel.kamelets.utils.format.spi.DataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; import org.apache.camel.support.CamelContextHelper; import org.apache.camel.support.service.ServiceSupport; import org.apache.camel.util.ObjectHelper; @@ -87,7 +88,7 @@ public Optional lookup(String scheme, String name) { Optional dataTypeConverter = getDataTypeConverter(scheme, name); if (!dataTypeConverter.isPresent()) { - dataTypeConverter = getDataTypeConverter("camel", name); + dataTypeConverter = getDataTypeConverter(DataType.DEFAULT_SCHEME, name); } return dataTypeConverter; @@ -101,8 +102,8 @@ protected void doInit() throws Exception { dataTypeLoaders.add(new AnnotationDataTypeLoader()); } - addDataTypeConverter(new DefaultDataTypeConverter("string", String.class)); - addDataTypeConverter(new DefaultDataTypeConverter("binary", byte[].class)); + addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "string", "text/plain", String.class)); + addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "binary", "application/octet-stream", byte[].class)); for (DataTypeLoader loader : dataTypeLoaders) { CamelContextAware.trySetCamelContext(loader, getCamelContext()); diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java index a15ff3a08..471e569fd 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/ddb/Ddb2JsonInputType.java @@ -77,7 +77,7 @@ * In case key and item 
attribute value maps are identical you can omit the special top level properties completely. The * converter will map the whole Json body as is then and use it as source for the attribute value map. */ -@DataType(scheme = "aws2-ddb", name = "json") +@DataType(scheme = "aws2-ddb", name = "json", mediaType = "application/json") public class Ddb2JsonInputType implements DataTypeConverter { private final JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), JsonNode.class); diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java index 6065ebd10..5f1fa0b8a 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java @@ -30,7 +30,7 @@ /** * Binary output type. 
*/ -@DataType(scheme = "aws2-s3", name = "binary") +@DataType(scheme = "aws2-s3", name = "binary", mediaType = "application/octet-stream") public class AWS2S3BinaryOutputType implements DataTypeConverter { @Override diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java index 399e01111..2eb5cb04b 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java @@ -33,7 +33,7 @@ * Output data type represents AWS S3 get object response as CloudEvent V1. The data type sets Camel specific * CloudEvent headers on the exchange. */ -@DataType(scheme = "aws2-s3", name = "cloudevents") +@DataType(scheme = "aws2-s3", name = "cloudevents", mediaType = "application/octet-stream") public class AWS2S3CloudEventOutputType implements DataTypeConverter { @Override diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java index d8d4ca4e6..54c677851 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java @@ -33,7 +33,7 @@ *

* Unmarshal type should be given as a fully qualified class name in the exchange properties. */ -@DataType(name = "jsonObject") +@DataType(name = "jsonObject", mediaType = "application/json") public class JsonModelDataType implements DataTypeConverter { public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel"; diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java index d39d30f80..a275b67b3 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java @@ -26,7 +26,7 @@ public interface DataTypeConverter { void convert(Exchange exchange); /** - * Gets the data type converter name. Automatically derives the name from given type annotation. + * Gets the data type converter name. Automatically derives the name from given data type annotation. * @return */ default String getName() { @@ -36,4 +36,28 @@ default String getName() { throw new UnsupportedOperationException("Missing data type converter name"); } + + /** + * Gets the data type component scheme. Automatically derived from given data type annotation. + * @return + */ + default String getScheme() { + if (this.getClass().isAnnotationPresent(DataType.class)) { + return this.getClass().getAnnotation(DataType.class).scheme(); + } + + return DataType.DEFAULT_SCHEME; + } + + /** + * Gets the data type media type. Automatically derived from given data type annotation. 
+ * @return + */ + default String getMediaType() { + if (this.getClass().isAnnotationPresent(DataType.class)) { + return this.getClass().getAnnotation(DataType.class).mediaType(); + } + + return ""; + } } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java index b1d4f5a9c..40a3030a2 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java @@ -31,11 +31,13 @@ @Target({ ElementType.TYPE }) public @interface DataType { + String DEFAULT_SCHEME = "camel"; + /** * Camel component scheme. * @return */ - String scheme() default "camel"; + String scheme() default DEFAULT_SCHEME; /** * Data type name. From 0f2b88830bfdef05abcd0a1493e9bdeb8c15bc41 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 25 Nov 2022 10:51:35 +0100 Subject: [PATCH 18/28] Fix rest-openapi-sink YAKS test --- test/rest-openapi-sink/rest-openapi-sink.feature | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/rest-openapi-sink/rest-openapi-sink.feature b/test/rest-openapi-sink/rest-openapi-sink.feature index 396f23d55..c5de64062 100644 --- a/test/rest-openapi-sink/rest-openapi-sink.feature +++ b/test/rest-openapi-sink/rest-openapi-sink.feature @@ -41,7 +41,7 @@ Feature: REST OpenAPI Kamelet sink Then send HTTP 200 OK Scenario: Verify proper addPet request message sent - Given expect HTTP request body: citrus:readFile(classpath:openapi.json) + Given expect HTTP request body: ${pet} And HTTP request header Content-Type is "application/json" When receive POST /petstore/pet And send HTTP 201 CREATED From 26b6166319958f5a60099fac5e1893dbfffc2117 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Fri, 25 Nov 
2022 15:27:14 +0100 Subject: [PATCH 19/28] Remove camel-cloudevents dependency - Avoid having the additional dependency in favor of using plain String constants --- .github/workflows/yaks-tests.yaml | 5 +++++ library/camel-kamelets-utils/pom.xml | 5 ----- .../aws2/s3/AWS2S3CloudEventOutputType.java | 14 +++++++++----- .../aws2/s3/AWS2S3CloudEventOutputTypeTest.java | 7 +++---- 4 files changed, 17 insertions(+), 14 deletions(-) diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index e06d1751b..4acd7c8a2 100644 --- a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -109,7 +109,12 @@ jobs: run: | echo "Running tests" yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS + yaks run test/aws-s3/aws-s3-uri-binding.feature $YAKS_RUN_OPTIONS + yaks run test/aws-s3/aws-s3-source-property-conf.feature $YAKS_RUN_OPTIONS + yaks run test/aws-s3/aws-s3-source-secret-conf.feature $YAKS_RUN_OPTIONS + yaks run test/aws-s3/aws-s3-source-uri-conf.feature $YAKS_RUN_OPTIONS + yaks run test/extract-field-action $YAKS_RUN_OPTIONS yaks run test/insert-field-action $YAKS_RUN_OPTIONS yaks run test/mail-sink $YAKS_RUN_OPTIONS diff --git a/library/camel-kamelets-utils/pom.xml b/library/camel-kamelets-utils/pom.xml index 2aba210da..5b1441f31 100644 --- a/library/camel-kamelets-utils/pom.xml +++ b/library/camel-kamelets-utils/pom.xml @@ -82,11 +82,6 @@ camel-aws2-s3 provided - - org.apache.camel - camel-cloudevents - - diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java index 2eb5cb04b..d1906f24b 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java +++ 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java @@ -25,7 +25,6 @@ import org.apache.camel.Exchange; import org.apache.camel.component.aws2.s3.AWS2S3Constants; -import org.apache.camel.component.cloudevents.CloudEvent; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; @@ -36,14 +35,19 @@ @DataType(scheme = "aws2-s3", name = "cloudevents", mediaType = "application/octet-stream") public class AWS2S3CloudEventOutputType implements DataTypeConverter { + static final String CAMEL_CLOUD_EVENT_TYPE = "CamelCloudEventType"; + static final String CAMEL_CLOUD_EVENT_SOURCE = "CamelCloudEventSource"; + static final String CAMEL_CLOUD_EVENT_SUBJECT = "CamelCloudEventSubject"; + static final String CAMEL_CLOUD_EVENT_TIME = "CamelCloudEventTime"; + @Override public void convert(Exchange exchange) { final Map headers = exchange.getMessage().getHeaders(); - headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source"); - headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class)); - headers.put(CloudEvent.CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class)); - headers.put(CloudEvent.CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange)); + headers.put(CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source"); + headers.put(CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class)); + headers.put(CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class)); + headers.put(CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange)); } private String getEventTime(Exchange exchange) { diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java 
b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java index e139b2b9c..f2d41606e 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java @@ -24,7 +24,6 @@ import org.apache.camel.CamelContextAware; import org.apache.camel.Exchange; import org.apache.camel.component.aws2.s3.AWS2S3Constants; -import org.apache.camel.component.cloudevents.CloudEvents; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; @@ -53,9 +52,9 @@ void shouldMapToCloudEvent() throws Exception { Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY)); - assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_TYPE)); - assertEquals("test1.txt", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SUBJECT)); - assertEquals("myBucket", exchange.getMessage().getHeader(CloudEvents.CAMEL_CLOUD_EVENT_SOURCE)); + assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_TYPE)); + assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SUBJECT)); + assertEquals("myBucket", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SOURCE)); } @Test From 0f99d4be313848d14d800836bb8598369ea1a83b Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Tue, 29 Nov 2022 09:53:51 +0100 Subject: [PATCH 20/28] Move AWS S3 binary output type to generic level --- .../format/DefaultDataTypeConverter.java 
| 10 ++- .../utils/format/DefaultDataTypeRegistry.java | 15 +++- .../aws2/s3/AWS2S3BinaryOutputType.java | 55 ------------ .../converter/standard/BinaryDataType.java | 38 ++++++++ .../converter/standard/StringDataType.java | 38 ++++++++ .../{aws2-s3-binary => camel-binary} | 2 +- .../camel/datatype/converter/camel-jsonObject | 18 ++++ .../camel/datatype/converter/camel-string | 18 ++++ .../format/DefaultDataTypeRegistryTest.java | 8 +- .../BinaryDataTypeTest.java} | 41 +++++---- .../standard/StringDataTypeTest.java | 90 +++++++++++++++++++ 11 files changed, 252 insertions(+), 81 deletions(-) delete mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java create mode 100644 library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java rename library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/{aws2-s3-binary => camel-binary} (90%) create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject create mode 100644 library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string rename library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/{aws2/s3/AWS2S3BinaryOutputTypeTest.java => standard/BinaryDataTypeTest.java} (72%) create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java index 9f2c31ceb..b639ceaeb 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeConverter.java @@ -17,9 +17,12 @@ package org.apache.camel.kamelets.utils.format; +import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; +import org.apache.camel.InvalidPayloadException; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; +import org.apache.camel.util.ObjectHelper; /** * Default data type converter receives a name and a target type in order to use traditional exchange body conversion @@ -53,7 +56,12 @@ public void convert(Exchange exchange) { return; } - exchange.getMessage().setBody(exchange.getMessage().getBody(type)); + try { + exchange.getMessage().setBody(exchange.getMessage().getMandatoryBody(type)); + } catch (InvalidPayloadException e) { + throw new CamelExecutionException(String.format("Failed to convert exchange body to '%s' content using type %s", + name, ObjectHelper.name(type)), exchange, e); + } } @Override diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index 3d5b514e6..1e530468c 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -26,6 +26,9 @@ import org.apache.camel.CamelContext; import org.apache.camel.CamelContextAware; import org.apache.camel.RuntimeCamelException; +import 
org.apache.camel.kamelets.utils.format.converter.standard.BinaryDataType; +import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType; +import org.apache.camel.kamelets.utils.format.converter.standard.StringDataType; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverterResolver; import org.apache.camel.kamelets.utils.format.spi.DataTypeLoader; @@ -55,6 +58,7 @@ public class DefaultDataTypeRegistry extends ServiceSupport implements DataTypeR private DataTypeConverterResolver dataTypeConverterResolver; private boolean classpathScan = true; + private boolean useDefaultConverters = true; private final Map> dataTypeConverters = new HashMap<>(); @@ -100,11 +104,12 @@ protected void doInit() throws Exception { if (classpathScan) { dataTypeLoaders.add(new AnnotationDataTypeLoader()); + } else if (useDefaultConverters) { + addDataTypeConverter(new BinaryDataType()); + addDataTypeConverter(new StringDataType()); + addDataTypeConverter(new JsonModelDataType()); } - addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "string", "text/plain", String.class)); - addDataTypeConverter(new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "binary", "application/octet-stream", byte[].class)); - for (DataTypeLoader loader : dataTypeLoaders) { CamelContextAware.trySetCamelContext(loader, getCamelContext()); loader.load(this); @@ -180,6 +185,10 @@ public void setClasspathScan(boolean classpathScan) { this.classpathScan = classpathScan; } + public void setUseDefaultConverters(boolean useDefaultConverters) { + this.useDefaultConverters = useDefaultConverters; + } + @Override public CamelContext getCamelContext() { return camelContext; diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java 
b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java deleted file mode 100644 index 5f1fa0b8a..000000000 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputType.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.camel.kamelets.utils.format.converter.aws2.s3; - -import java.io.IOException; -import java.io.InputStream; - -import org.apache.camel.CamelExecutionException; -import org.apache.camel.Exchange; -import org.apache.camel.InvalidPayloadException; -import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; -import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; -import software.amazon.awssdk.utils.IoUtils; - -/** - * Binary output type. 
- */ -@DataType(scheme = "aws2-s3", name = "binary", mediaType = "application/octet-stream") -public class AWS2S3BinaryOutputType implements DataTypeConverter { - - @Override - public void convert(Exchange exchange) { - if (exchange.getMessage().getBody() instanceof byte[]) { - return; - } - - try { - InputStream is = exchange.getMessage().getBody(InputStream.class); - if (is != null) { - exchange.getMessage().setBody(IoUtils.toByteArray(is)); - return; - } - - // Use default Camel converter utils to convert body to byte[] - exchange.getMessage().setBody(exchange.getMessage().getMandatoryBody(byte[].class)); - } catch (IOException | InvalidPayloadException e) { - throw new CamelExecutionException("Failed to convert AWS S3 body to byte[]", exchange, e); - } - } -} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java new file mode 100644 index 000000000..532e998ba --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataType.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.converter.standard; + +import org.apache.camel.Exchange; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + +/** + * Binary data type. + */ +@DataType(name = "binary", mediaType = "application/octet-stream") +public class BinaryDataType implements DataTypeConverter { + + private static final DataTypeConverter DELEGATE = + new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "binary", "application/octet-stream", byte[].class); + + @Override + public void convert(Exchange exchange) { + DELEGATE.convert(exchange); + } +} diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java new file mode 100644 index 000000000..d60b2aaa9 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataType.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.camel.kamelets.utils.format.converter.standard; + +import org.apache.camel.Exchange; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + +/** + * String data type. + */ +@DataType(name = "string", mediaType = "text/plain") +public class StringDataType implements DataTypeConverter { + + private static final DataTypeConverter DELEGATE = + new DefaultDataTypeConverter(DataType.DEFAULT_SCHEME, "string", "text/plain", String.class); + + @Override + public void convert(Exchange exchange) { + DELEGATE.convert(exchange); + } +} diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary similarity index 90% rename from library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary rename to library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary index ba9c13f34..edf9a4ca6 100644 --- a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/aws2-s3-binary +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-binary @@ -15,4 +15,4 @@ # limitations under the License. 
# -class=org.apache.camel.kamelets.utils.format.converter.aws2.s3.AWS2S3BinaryOutputType \ No newline at end of file +class=org.apache.camel.kamelets.utils.format.converter.standard.BinaryDataType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject new file mode 100644 index 000000000..2f725f6aa --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-jsonObject @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +class=org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string new file mode 100644 index 000000000..8ef257256 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/resources/META-INF/services/org/apache/camel/datatype/converter/camel-string @@ -0,0 +1,18 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +class=org.apache.camel.kamelets.utils.format.converter.standard.StringDataType \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java index c72e7897a..d83c474b2 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistryTest.java @@ -21,7 +21,9 @@ import org.apache.camel.CamelContextAware; import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.converter.standard.BinaryDataType; import org.apache.camel.kamelets.utils.format.converter.standard.JsonModelDataType; +import org.apache.camel.kamelets.utils.format.converter.standard.StringDataType; import org.apache.camel.kamelets.utils.format.converter.test.UppercaseDataType; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.junit.jupiter.api.Assertions; @@ -44,12 +46,10 @@ public void shouldLookupDefaultDataTypeConverters() throws Exception { Assertions.assertEquals(JsonModelDataType.class, converter.get().getClass()); converter = dataTypeRegistry.lookup( "string"); Assertions.assertTrue(converter.isPresent()); - Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass()); - Assertions.assertEquals(String.class, ((DefaultDataTypeConverter) converter.get()).getType()); + Assertions.assertEquals(StringDataType.class, converter.get().getClass()); converter = dataTypeRegistry.lookup( "binary"); Assertions.assertTrue(converter.isPresent()); - Assertions.assertEquals(DefaultDataTypeConverter.class, converter.get().getClass()); - Assertions.assertEquals(byte[].class, ((DefaultDataTypeConverter) converter.get()).getType()); + 
Assertions.assertEquals(BinaryDataType.class, converter.get().getClass()); converter = dataTypeRegistry.lookup( "lowercase"); Assertions.assertTrue(converter.isPresent()); converter = dataTypeRegistry.lookup( "uppercase"); diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java similarity index 72% rename from library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java rename to library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java index 26b359f47..d2dd616a2 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3BinaryOutputTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/BinaryDataTypeTest.java @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -package org.apache.camel.kamelets.utils.format.converter.aws2.s3; +package org.apache.camel.kamelets.utils.format.converter.standard; import java.io.ByteArrayInputStream; import java.nio.charset.StandardCharsets; @@ -22,32 +22,40 @@ import org.apache.camel.CamelContextAware; import org.apache.camel.Exchange; -import org.apache.camel.component.aws2.s3.AWS2S3Constants; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.support.DefaultExchange; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; -import software.amazon.awssdk.core.ResponseInputStream; -import software.amazon.awssdk.http.AbortableInputStream; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; import static org.junit.jupiter.api.Assertions.assertEquals; -public class AWS2S3BinaryOutputTypeTest { +public class BinaryDataTypeTest { private final DefaultCamelContext camelContext = new DefaultCamelContext(); - private final AWS2S3BinaryOutputType outputType = new AWS2S3BinaryOutputType(); + private final BinaryDataType dataType = new BinaryDataType(); + + @Test + void shouldRetainBytesModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader("file", "test.txt"); + exchange.getMessage().setBody("Test".getBytes(StandardCharsets.UTF_8)); + dataType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + assertBinaryBody(exchange, "test.txt", "Test"); + } @Test void shouldMapFromStringToBytesModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); - exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test1.txt"); + exchange.getMessage().setHeader("file", "test1.txt"); exchange.getMessage().setBody("Test1"); - outputType.convert(exchange); + dataType.convert(exchange); 
Assertions.assertTrue(exchange.getMessage().hasHeaders()); assertBinaryBody(exchange, "test1.txt", "Test1"); @@ -57,9 +65,9 @@ void shouldMapFromStringToBytesModel() throws Exception { void shouldMapFromBytesToBytesModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); - exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test2.txt"); + exchange.getMessage().setHeader("file", "test2.txt"); exchange.getMessage().setBody("Test2".getBytes(StandardCharsets.UTF_8)); - outputType.convert(exchange); + dataType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); assertBinaryBody(exchange, "test2.txt", "Test2"); @@ -69,10 +77,9 @@ void shouldMapFromBytesToBytesModel() throws Exception { void shouldMapFromInputStreamToBytesModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); - exchange.getMessage().setHeader(AWS2S3Constants.KEY, "test3.txt"); - exchange.getMessage().setBody(new ResponseInputStream<>(GetObjectRequest.builder().bucket("myBucket").key("test3.txt").build(), - AbortableInputStream.create(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8))))); - outputType.convert(exchange); + exchange.getMessage().setHeader("file", "test3.txt"); + exchange.getMessage().setBody(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8))); + dataType.convert(exchange); Assertions.assertTrue(exchange.getMessage().hasHeaders()); assertBinaryBody(exchange, "test3.txt", "Test3"); @@ -82,12 +89,12 @@ void shouldMapFromInputStreamToBytesModel() throws Exception { public void shouldLookupDataType() throws Exception { DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); - Optional converter = dataTypeRegistry.lookup("aws2-s3", "binary"); + Optional converter = dataTypeRegistry.lookup( "binary"); Assertions.assertTrue(converter.isPresent()); } private static void assertBinaryBody(Exchange 
exchange, String key, String content) { - assertEquals(key, exchange.getMessage().getHeader(AWS2S3Constants.KEY)); + assertEquals(key, exchange.getMessage().getHeader("file")); assertEquals(byte[].class, exchange.getMessage().getBody().getClass()); assertEquals(content, exchange.getMessage().getBody(String.class)); diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java new file mode 100644 index 000000000..8ee19cbab --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/StringDataTypeTest.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.camel.kamelets.utils.format.converter.standard; + +import java.io.ByteArrayInputStream; +import java.nio.charset.StandardCharsets; +import java.util.Optional; + +import org.apache.camel.CamelContextAware; +import org.apache.camel.Exchange; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry; +import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; +import org.apache.camel.support.DefaultExchange; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class StringDataTypeTest { + + private final DefaultCamelContext camelContext = new DefaultCamelContext(); + + private final StringDataType dataType = new StringDataType(); + + @Test + void shouldRetainStringModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader("file", "test.txt"); + exchange.getMessage().setBody("Test"); + dataType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + assertStringBody(exchange, "test.txt", "Test"); + } + + @Test + void shouldMapFromBinaryToStringModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader("file", "test1.txt"); + exchange.getMessage().setBody("Test1".getBytes(StandardCharsets.UTF_8)); + dataType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + assertStringBody(exchange, "test1.txt", "Test1"); + } + + @Test + void shouldMapFromInputStreamToStringModel() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setHeader("file", "test3.txt"); + exchange.getMessage().setBody(new ByteArrayInputStream("Test3".getBytes(StandardCharsets.UTF_8))); + dataType.convert(exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + 
assertStringBody(exchange, "test3.txt", "Test3"); + } + + @Test + public void shouldLookupDataType() throws Exception { + DefaultDataTypeRegistry dataTypeRegistry = new DefaultDataTypeRegistry(); + CamelContextAware.trySetCamelContext(dataTypeRegistry, camelContext); + Optional converter = dataTypeRegistry.lookup( "string"); + Assertions.assertTrue(converter.isPresent()); + } + + private static void assertStringBody(Exchange exchange, String key, String content) { + assertEquals(key, exchange.getMessage().getHeader("file")); + + assertEquals(String.class, exchange.getMessage().getBody().getClass()); + assertEquals(content, exchange.getMessage().getBody(String.class)); + } +} From 4fd0681236cd9e02c05061ac288983852f1e3a38 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Tue, 29 Nov 2022 10:14:20 +0100 Subject: [PATCH 21/28] Do cache ObjectMapper instance in JsonModelDatType converter Also use Camel ClassResolver API to resolve model class --- .../utils/format/DefaultDataTypeRegistry.java | 7 ++++++ .../converter/standard/JsonModelDataType.java | 23 +++++++++++++++++-- .../standard/JsonModelDataTypeTest.java | 6 +++++ 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java index 1e530468c..24c77b706 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/DefaultDataTypeRegistry.java @@ -115,6 +115,13 @@ protected void doInit() throws Exception { loader.load(this); } + // if applicable set Camel context on all loaded data type converters + dataTypeConverters.values().forEach(converters -> converters.forEach(converter -> { + if (converter instanceof CamelContextAware && 
((CamelContextAware) converter).getCamelContext() == null) { + CamelContextAware.trySetCamelContext(converter, camelContext); + } + })); + LOG.debug("Loaded {} schemes holding {} data type converters", dataTypeConverters.size(), dataTypeConverters.values().stream().mapToInt(List::size).sum()); } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java index 54c677851..0a80ee328 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java @@ -21,12 +21,15 @@ import java.io.InputStream; import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.CamelContext; +import org.apache.camel.CamelContextAware; import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; import org.apache.camel.InvalidPayloadException; import org.apache.camel.component.jackson.JacksonDataFormat; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; +import org.apache.camel.util.ObjectHelper; /** * Data type converter able to unmarshal to given unmarshalType using jackson data format. @@ -34,18 +37,24 @@ * Unmarshal type should be given as a fully qualified class name in the exchange properties. 
*/ @DataType(name = "jsonObject", mediaType = "application/json") -public class JsonModelDataType implements DataTypeConverter { +public class JsonModelDataType implements DataTypeConverter, CamelContextAware { public static final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel"; + private CamelContext camelContext; + + private static final ObjectMapper mapper = new ObjectMapper(); + @Override public void convert(Exchange exchange) { if (!exchange.hasProperties() || !exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) { return; } + ObjectHelper.notNull(camelContext, "camelContext"); + String type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class); - try (JacksonDataFormat dataFormat = new JacksonDataFormat(new ObjectMapper(), Class.forName(type))) { + try (JacksonDataFormat dataFormat = new JacksonDataFormat(mapper, camelContext.getClassResolver().resolveMandatoryClass(type))) { Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange)); exchange.getMessage().setBody(unmarshalled); } catch (Exception e) { @@ -63,4 +72,14 @@ private InputStream getBodyAsStream(Exchange exchange) throws InvalidPayloadExce return bodyStream; } + + @Override + public CamelContext getCamelContext() { + return camelContext; + } + + @Override + public void setCamelContext(CamelContext camelContext) { + this.camelContext = camelContext; + } } diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java index d93da2348..cb253a163 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java @@ -27,6 +27,7 @@ import 
org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.support.DefaultExchange; import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -37,6 +38,11 @@ public class JsonModelDataTypeTest { private final JsonModelDataType dataType = new JsonModelDataType(); + @BeforeEach + public void setup() { + dataType.setCamelContext(camelContext); + } + @Test void shouldMapFromStringToJsonModel() throws Exception { Exchange exchange = new DefaultExchange(camelContext); From 29e2cc9d50522d3cc4b889c76abcd872f78e6694 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Tue, 29 Nov 2022 11:01:13 +0100 Subject: [PATCH 22/28] Enhance documentation on data type SPI --- .../utils/format/spi/DataTypeConverter.java | 20 ++++++++-- .../format/spi/DataTypeConverterResolver.java | 26 +++++++------ .../utils/format/spi/DataTypeLoader.java | 6 +-- .../utils/format/spi/DataTypeRegistry.java | 38 +++++++++++-------- .../format/spi/annotations/DataType.java | 15 +++++--- 5 files changed, 66 insertions(+), 39 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java index a275b67b3..f9c175b03 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverter.java @@ -20,14 +20,23 @@ import org.apache.camel.Exchange; import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; +/** + * Converter applies custom logic to a given exchange in order to update the message content in that exchange according to + * the data type. 
+ */ @FunctionalInterface public interface DataTypeConverter { + /** + * Changes the exchange message content (body and/or header) to represent the data type. + * @param exchange the exchange that should have its message content applied to the data type. + */ void convert(Exchange exchange); /** - * Gets the data type converter name. Automatically derives the name from given data type annotation. - * @return + * Gets the data type converter name. Automatically derives the name from given data type annotation if any. + * Subclasses may add a fallback logic to determine the data type name in case the annotation is missing. + * @return the name of the data type. */ default String getName() { if (this.getClass().isAnnotationPresent(DataType.class)) { @@ -39,7 +48,8 @@ default String getName() { /** * Gets the data type component scheme. Automatically derived from given data type annotation. - * @return + * Subclasses may add custom logic to determine the data type scheme. By default, the generic Camel scheme is used. + * @return the component scheme of the data type. */ default String getScheme() { if (this.getClass().isAnnotationPresent(DataType.class)) { @@ -51,7 +61,9 @@ default String getScheme() { /** * Gets the data type media type. Automatically derived from given data type annotation. - * @return + * Subclasses may add additional logic to determine the media type when annotation is missing. + * By default, returns empty String as a media type. + * @return the media type of the data type. 
*/ default String getMediaType() { if (this.getClass().isAnnotationPresent(DataType.class)) { diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java index 17c48664f..f54aaa926 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeConverterResolver.java @@ -20,30 +20,34 @@ import java.util.Optional; import org.apache.camel.CamelContext; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; /** - * Represents a resolver of data type converters from a URI to be able to lazy load them using some discovery mechanism. + * Resolves data type converters from URI to be able to lazy load converters using factory finder discovery mechanism. */ @FunctionalInterface public interface DataTypeConverterResolver { /** - * Attempts to resolve the converter for the given URI. + * Attempts to resolve the converter for the given scheme and name. Usually uses the factory finder URI to resolve the converter. + * Scheme and name may be combined in order to resolve component specific converters. Usually implements a fallback + * resolving mechanism when no matching converter for scheme and name is found (e.g. search for generic Camel converters just using the name). * - * @param scheme - * @param name - * @param camelContext - * @return + * @param scheme the data type scheme. + * @param name the data type name. + * @param camelContext the current Camel context. + * @return optional data type resolved via URI factory finder. */ Optional resolve(String scheme, String name, CamelContext camelContext); /** - * Attempts to resolve default converter for the given name. 
- * @param name - * @param camelContext - * @return + * Attempts to resolve default converter for the given name. Uses default Camel scheme to resolve the converter via factory finder mechanism. + * + * @param name the data type name. + * @param camelContext the current Camel context. + * @return optional data type resolved via URI factory finder. */ default Optional resolve(String name, CamelContext camelContext) { - return resolve("camel", name, camelContext); + return resolve(DataType.DEFAULT_SCHEME, name, camelContext); } } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java index 73f87c696..453485fef 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeLoader.java @@ -18,14 +18,14 @@ package org.apache.camel.kamelets.utils.format.spi; /** - * A pluggable strategy to load data types into a {@link DataTypeRegistry}. + * A pluggable strategy to load data types into a {@link DataTypeRegistry}. Loads one to many data type converters to the given registry. */ public interface DataTypeLoader { /** - * A pluggable strategy to load data types into a registry. + * A pluggable strategy to load data types into a given registry. * - * @param registry the registry to load the data types into + * @param registry the registry to load the data types into. 
*/ void load(DataTypeRegistry registry); } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java index cb2bedc91..d47185477 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/DataTypeRegistry.java @@ -19,42 +19,50 @@ import java.util.Optional; +import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; + /** - * Registry for data types. Data type loaders should be used to add types to the registry. + * Registry for data types and its converters. Data type loaders should be used to add members to the registry. *

- * The registry is able to perform a lookup of a specific data type. + * The registry is able to perform a lookup of a specific data type by its given scheme and name. Usually data types are grouped + * by their component scheme so users may use component specific converters and default Camel converters. */ public interface DataTypeRegistry { /** - * Registers a new default data type converter. - * @param scheme - * @param converter + * Registers a new default data type converter. Usually used to add default Camel converter implementations. + * + * @param scheme the data type scheme. + * @param converter the converter implementation. */ void addDataTypeConverter(String scheme, DataTypeConverter converter); /** - * Registers a new default data type converter. - * @param converter + * Registers a new default data type converter. Uses the default Camel scheme to mark this converter as generic one. + * + * @param converter the data type converter implementation. */ default void addDataTypeConverter(DataTypeConverter converter) { - addDataTypeConverter("camel", converter); + addDataTypeConverter(DataType.DEFAULT_SCHEME, converter); } /** - * Find data type for given component scheme and data type name. - * @param scheme - * @param name - * @return + * Find data type for given component scheme and data type name. Searches for the component scheme specific converter first. + * As a fallback may also try to resolve the converter with only the name in the given set of default Camel converters registered in this registry. + * + * @param scheme the data type converter scheme (usually a component scheme). + * @param name the data type converter name. + * @return optional data type converter implementation matching the given scheme and name. */ Optional lookup(String scheme, String name); /** - * Find data type for given data type name. - * @param name + * Find data type for given data type name. 
Just searches the set of default Camel converter implementations registered in this registry. + * + * @param name the data type converter name. * @return */ default Optional lookup(String name) { - return lookup("camel", name); + return lookup(DataType.DEFAULT_SCHEME, name); } } diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java index 40a3030a2..b52088878 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/spi/annotations/DataType.java @@ -24,7 +24,10 @@ import java.lang.annotation.Target; /** - * Data type annotation defines a type with its component scheme, a name and input/output types. + * Data type annotation defines a data type with its component scheme, a name and optional media types. + *

+ * The annotation is used by specific classpath scanning data type loaders to automatically add the data types to + * a registry. */ @Retention(RetentionPolicy.RUNTIME) @Documented @@ -34,20 +37,20 @@ String DEFAULT_SCHEME = "camel"; /** - * Camel component scheme. - * @return + * Camel component scheme. Specifies whether a data type is component specific. + * @return the data type scheme. */ String scheme() default DEFAULT_SCHEME; /** - * Data type name. - * @return + * Data type name. Identifies the data type. Should be unique in combination with scheme. + * @return the data type name. */ String name(); /** * The media type associated with this data type. - * @return + * @return the media type or empty string as default. */ String mediaType() default ""; } From 4cc1de44f7c4d3935f81729b9af322f3f2f1831f Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Wed, 30 Nov 2022 13:02:55 +0100 Subject: [PATCH 23/28] Improve CloudEvents output produced by AWS S3 source - Align with CloudEvents spec in creating proper event type and source values - Enable Knative YAKS tests --- .github/actions/install-knative/action.yml | 26 ++++ .../install-knative/install-knative.sh | 142 ++++++++++++++++++ .github/workflows/yaks-tests.yaml | 7 +- .../aws2/s3/AWS2S3CloudEventOutputType.java | 4 +- .../s3/AWS2S3CloudEventOutputTypeTest.java | 4 +- test/aws-s3/aws-s3-cloudevents.feature | 8 +- test/aws-s3/aws-s3-knative-binding.feature | 16 +- test/aws-s3/aws-s3-knative.feature | 5 + test/aws-s3/yaks-config.yaml | 2 +- test/utils/knative-channel-to-log.yaml | 34 +++++ 10 files changed, 230 insertions(+), 18 deletions(-) create mode 100644 .github/actions/install-knative/action.yml create mode 100755 .github/actions/install-knative/install-knative.sh create mode 100644 test/utils/knative-channel-to-log.yaml diff --git a/.github/actions/install-knative/action.yml b/.github/actions/install-knative/action.yml new file mode 100644 index 000000000..24dd36f9a --- /dev/null +++ 
b/.github/actions/install-knative/action.yml @@ -0,0 +1,26 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- + +name: install-knative +description: 'Install Knative serving and eventing' +runs: + using: "composite" + steps: + - name: Install Knative + shell: bash + run: | + ./.github/actions/install-knative/install-knative.sh diff --git a/.github/actions/install-knative/install-knative.sh b/.github/actions/install-knative/install-knative.sh new file mode 100755 index 000000000..8434afc94 --- /dev/null +++ b/.github/actions/install-knative/install-knative.sh @@ -0,0 +1,142 @@ +#!/bin/bash + +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- + +#### +# +# Install the knative setup +# +#### + +set -e + +# Prerequisites +sudo wget https://github.com/mikefarah/yq/releases/download/v4.26.1/yq_linux_amd64 -O /usr/bin/yq && sudo chmod +x /usr/bin/yq + +set +e + +export SERVING_VERSION=knative-v1.6.0 +export EVENTING_VERSION=knative-v1.6.0 +export KOURIER_VERSION=knative-v1.6.0 + +apply() { + local file="${1:-}" + if [ -z "${file}" ]; then + echo "Error: Cannot apply. No file." + exit 1 + fi + + kubectl apply --filename ${file} + if [ $? != 0 ]; then + sleep 5 + echo "Re-applying ${file} ..." + kubectl apply --filename ${file} + if [ $? != 0 ]; then + echo "Error: Application of resource failed." 
+ exit 1 + fi + fi +} + +SERVING_CRDS="https://github.com/knative/serving/releases/download/${SERVING_VERSION}/serving-crds.yaml" +SERVING_CORE="https://github.com/knative/serving/releases/download/${SERVING_VERSION}/serving-core.yaml" +KOURIER="https://github.com/knative-sandbox/net-kourier/releases/download/${KOURIER_VERSION}/kourier.yaml" +EVENTING_CRDS="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/eventing-crds.yaml" +EVENTING_CORE="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/eventing-core.yaml" +IN_MEMORY_CHANNEL="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/in-memory-channel.yaml" +CHANNEL_BROKER="https://github.com/knative/eventing/releases/download/${EVENTING_VERSION}/mt-channel-broker.yaml" + +# Serving +apply "${SERVING_CRDS}" + +YAML=$(mktemp serving-core-XXX.yaml) +curl -L -s ${SERVING_CORE} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML} +if [ -s ${YAML} ]; then + apply ${YAML} + echo "Waiting for pods to be ready in knative-serving (dependency for kourier)" + kubectl wait --for=condition=Ready pod --all -n knative-serving --timeout=60s +else + echo "Error: Failed to correctly download ${SERVING_CORE}" + exit 1 +fi + +# Kourier +apply "${KOURIER}" + +sleep 5 + +kubectl patch configmap/config-network \ + --namespace knative-serving \ + --type merge \ + --patch '{"data":{"ingress.class":"kourier.ingress.networking.knative.dev"}}' +if [ $? 
!= 0 ]; then + echo "Error: Failed to patch configmap" + exit 1 +fi + +# Eventing +apply "${EVENTING_CRDS}" + +YAML=$(mktemp eventing-XXX.yaml) +curl -L -s ${EVENTING_CORE} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML} +if [ -s ${YAML} ]; then + apply ${YAML} +else + echo "Error: Failed to correctly download ${EVENTING_CORE}" + exit 1 +fi + +# Eventing channels +YAML=$(mktemp in-memory-XXX.yaml) +curl -L -s ${IN_MEMORY_CHANNEL} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML} +if [ -s ${YAML} ]; then + apply ${YAML} +else + echo "Error: Failed to correctly download ${IN_MEMORY_CHANNEL}" + exit 1 +fi + +# Eventing broker +YAML=$(mktemp channel-broker-XXX.yaml) +curl -L -s ${CHANNEL_BROKER} | head -n -1 | yq e 'del(.spec.template.spec.containers[].resources)' - > ${YAML} +if [ -s ${YAML} ]; then + apply ${YAML} +else + echo "Error: Failed to correctly download ${CHANNEL_BROKER}" + exit 1 +fi + +# Eventing sugar controller configuration +echo "Patching Knative eventing configuration" +kubectl patch configmap/config-sugar \ + -n knative-eventing \ + --type merge \ + -p '{"data":{"namespace-selector":"{\"matchExpressions\":[{\"key\":\"eventing.knative.dev/injection\",\"operator\":\"In\",\"values\":[\"enabled\"]}]}"}}' + +kubectl patch configmap/config-sugar \ + -n knative-eventing \ + --type merge \ + -p '{"data":{"trigger-selector":"{\"matchExpressions\":[{\"key\":\"eventing.knative.dev/injection\",\"operator\":\"In\",\"values\":[\"enabled\"]}]}"}}' + +# Wait for installation completed +echo "Waiting for all pods to be ready in kourier-system" +kubectl wait --for=condition=Ready pod --all -n kourier-system --timeout=60s +echo "Waiting for all pods to be ready in knative-serving" +kubectl wait --for=condition=Ready pod --all -n knative-serving --timeout=60s +echo "Waiting for all pods to be ready in knative-eventing" +kubectl wait --for=condition=Ready pod --all -n knative-eventing --timeout=60s diff --git
a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index 4acd7c8a2..73dcec77c 100644 --- a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -91,6 +91,8 @@ jobs: kubectl version kubectl cluster-info kubectl describe nodes + - name: Install Knative + uses: ./.github/actions/install-knative - name: Install Camel K run: | # Configure install options @@ -110,10 +112,7 @@ jobs: echo "Running tests" yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS - yaks run test/aws-s3/aws-s3-uri-binding.feature $YAKS_RUN_OPTIONS - yaks run test/aws-s3/aws-s3-source-property-conf.feature $YAKS_RUN_OPTIONS - yaks run test/aws-s3/aws-s3-source-secret-conf.feature $YAKS_RUN_OPTIONS - yaks run test/aws-s3/aws-s3-source-uri-conf.feature $YAKS_RUN_OPTIONS + yaks run test/aws-s3 $YAKS_RUN_OPTIONS yaks run test/extract-field-action $YAKS_RUN_OPTIONS yaks run test/insert-field-action $YAKS_RUN_OPTIONS diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java index d1906f24b..4bc87192f 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputType.java @@ -44,8 +44,8 @@ public class AWS2S3CloudEventOutputType implements DataTypeConverter { public void convert(Exchange exchange) { final Map headers = exchange.getMessage().getHeaders(); - headers.put(CAMEL_CLOUD_EVENT_TYPE, "kamelet.aws.s3.source"); - headers.put(CAMEL_CLOUD_EVENT_SOURCE, exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class)); + headers.put(CAMEL_CLOUD_EVENT_TYPE, "org.apache.camel.event.aws.s3.getObject"); + 
headers.put(CAMEL_CLOUD_EVENT_SOURCE, "aws.s3.bucket." + exchange.getMessage().getHeader(AWS2S3Constants.BUCKET_NAME, String.class)); headers.put(CAMEL_CLOUD_EVENT_SUBJECT, exchange.getMessage().getHeader(AWS2S3Constants.KEY, String.class)); headers.put(CAMEL_CLOUD_EVENT_TIME, getEventTime(exchange)); } diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java index f2d41606e..084f4c161 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/aws2/s3/AWS2S3CloudEventOutputTypeTest.java @@ -52,9 +52,9 @@ void shouldMapToCloudEvent() throws Exception { Assertions.assertTrue(exchange.getMessage().hasHeaders()); Assertions.assertTrue(exchange.getMessage().getHeaders().containsKey(AWS2S3Constants.KEY)); - assertEquals("kamelet.aws.s3.source", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_TYPE)); + assertEquals("org.apache.camel.event.aws.s3.getObject", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_TYPE)); assertEquals("test1.txt", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SUBJECT)); - assertEquals("myBucket", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SOURCE)); + assertEquals("aws.s3.bucket.myBucket", exchange.getMessage().getHeader(AWS2S3CloudEventOutputType.CAMEL_CLOUD_EVENT_SOURCE)); } @Test diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/test/aws-s3/aws-s3-cloudevents.feature index 1e2f7d1ef..52ac84c54 100644 --- a/test/aws-s3/aws-s3-cloudevents.feature +++ b/test/aws-s3/aws-s3-cloudevents.feature @@ -20,6 +20,10 @@ 
Feature: AWS S3 Kamelet - cloud events data type Given New global Camel context Given load to Camel registry amazonS3Client.groovy + Scenario: Create Knative broker + Given create Knative broker default + And Knative broker default is running + Scenario: Create AWS-S3 Kamelet to Knative binding Given variable loginfo is "Installed features" When load KameletBinding aws-s3-to-knative.yaml @@ -35,8 +39,8 @@ Feature: AWS S3 Kamelet - cloud events data type Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} Then expect Knative event data: ${aws.s3.message} And verify Knative event - | type | kamelet.aws.s3.source | - | source | ${aws.s3.bucketNameOrArn} | + | type | org.apache.camel.event.aws.s3.getObject | + | source | aws.s3.bucket.${aws.s3.bucketNameOrArn} | | subject | ${aws.s3.key} | | id | @ignore@ | diff --git a/test/aws-s3/aws-s3-knative-binding.feature b/test/aws-s3/aws-s3-knative-binding.feature index c143bbeee..e94ab7157 100644 --- a/test/aws-s3/aws-s3-knative-binding.feature +++ b/test/aws-s3/aws-s3-knative-binding.feature @@ -17,31 +17,33 @@ Feature: AWS S3 Kamelet - binding to Knative Given New global Camel context Given load to Camel registry amazonS3Client.groovy - Scenario: Create Knative broker + Scenario: Create Knative broker and channel Given create Knative broker default And Knative broker default is running + And create Knative channel messages Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding Given variable loginfo is "Installed features" Given load KameletBinding aws-s3-to-knative.yaml - Given load KameletBinding knative-to-log.yaml + Given load KameletBinding knative-channel-to-log.yaml Then KameletBinding aws-s3-to-knative should be available - And KameletBinding knative-to-log should be available + And KameletBinding knative-channel-to-log should be available And Camel K integration aws-s3-to-knative is running - And Camel K integration knative-to-log is 
running + And Camel K integration knative-channel-to-log is running And Camel K integration aws-s3-to-knative should print ${loginfo} - And Camel K integration knative-to-log should print ${loginfo} + And Camel K integration knative-channel-to-log should print ${loginfo} Then sleep 10000 ms Scenario: Verify Kamelet source Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} - Then Camel K integration knative-to-log should print ${aws.s3.message} + Then Camel K integration knative-channel-to-log should print ${aws.s3.message} Scenario: Remove resources Given delete KameletBinding aws-s3-to-knative - Given delete KameletBinding knative-to-log + Given delete KameletBinding knative-channel-to-log Given delete Knative broker default + Given delete Knative channel messages Scenario: Stop container Given stop LocalStack container diff --git a/test/aws-s3/aws-s3-knative.feature b/test/aws-s3/aws-s3-knative.feature index 148ec1d6f..fe080fa21 100644 --- a/test/aws-s3/aws-s3-knative.feature +++ b/test/aws-s3/aws-s3-knative.feature @@ -20,6 +20,10 @@ Feature: AWS S3 Kamelet - Knative binding Given New global Camel context Given load to Camel registry amazonS3Client.groovy + Scenario: Create Knative broker + Given create Knative broker default + And Knative broker default is running + Scenario: Create AWS-S3 Kamelet to Knative binding Given variable loginfo is "Installed features" When load KameletBinding aws-s3-to-knative.yaml @@ -42,6 +46,7 @@ Feature: AWS S3 Kamelet - Knative binding Scenario: Remove Camel K resources Given delete KameletBinding aws-s3-to-knative Given delete Kubernetes service event-consumer-service + Given delete Knative broker default Scenario: Stop container Given stop LocalStack container diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml index 0d70ba755..4ef910c43 100644 --- 
a/test/aws-s3/yaks-config.yaml +++ b/test/aws-s3/yaks-config.yaml @@ -43,7 +43,7 @@ config: - aws-s3-to-log-secret-based.groovy - aws-s3-uri-binding.yaml - aws-s3-to-knative.yaml - - ../utils/knative-to-log.yaml + - ../utils/knative-channel-to-log.yaml cucumber: tags: - "not @ignored" diff --git a/test/utils/knative-channel-to-log.yaml b/test/utils/knative-channel-to-log.yaml new file mode 100644 index 000000000..4fc551c6f --- /dev/null +++ b/test/utils/knative-channel-to-log.yaml @@ -0,0 +1,34 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: knative-channel-to-log +spec: + source: + ref: + kind: InMemoryChannel + apiVersion: messaging.knative.dev/v1 + name: messages + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: log-sink + properties: + showHeaders: true From dd0c65ebbe0bab5326aef53bbf928d4e4a66965a Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Wed, 30 Nov 2022 15:56:53 +0100 Subject: [PATCH 24/28] Simplify Json model data type - Remove JacksonDataFormat in favor of using simple ObjectMapper instance - Reuse ObjectMapper instance for all exchanges processed by the data type --- .../converter/standard/JsonModelDataType.java | 22 ++++++++++++++----- .../standard/JsonModelDataTypeTest.java | 14 +++++++++++- 2 files changed, 30 insertions(+), 6 deletions(-) diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java index 0a80ee328..183f11123 100644 --- a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataType.java @@ -26,7 +26,6 @@ import org.apache.camel.CamelExecutionException; import org.apache.camel.Exchange; import org.apache.camel.InvalidPayloadException; -import org.apache.camel.component.jackson.JacksonDataFormat; import org.apache.camel.kamelets.utils.format.spi.DataTypeConverter; import org.apache.camel.kamelets.utils.format.spi.annotations.DataType; import org.apache.camel.util.ObjectHelper; @@ -41,21 +40,30 @@ public class JsonModelDataType implements DataTypeConverter, CamelContextAware { public static 
final String DATA_TYPE_MODEL_PROPERTY = "CamelDataTypeModel"; + private String model; + private CamelContext camelContext; private static final ObjectMapper mapper = new ObjectMapper(); @Override public void convert(Exchange exchange) { - if (!exchange.hasProperties() || !exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) { + String type; + if (exchange.hasProperties() && exchange.getProperties().containsKey(DATA_TYPE_MODEL_PROPERTY)) { + type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class); + } else { + type = model; + } + + if (type == null) { + // neither model property nor exchange property defines proper type - do nothing return; } ObjectHelper.notNull(camelContext, "camelContext"); - String type = exchange.getProperty(DATA_TYPE_MODEL_PROPERTY, String.class); - try (JacksonDataFormat dataFormat = new JacksonDataFormat(mapper, camelContext.getClassResolver().resolveMandatoryClass(type))) { - Object unmarshalled = dataFormat.unmarshal(exchange, getBodyAsStream(exchange)); + try { + Object unmarshalled = mapper.reader().forType(camelContext.getClassResolver().resolveMandatoryClass(type)).readValue(getBodyAsStream(exchange)); exchange.getMessage().setBody(unmarshalled); } catch (Exception e) { throw new CamelExecutionException( @@ -78,6 +86,10 @@ public CamelContext getCamelContext() { return camelContext; } + public void setModel(String model) { + this.model = model; + } + @Override public void setCamelContext(CamelContext camelContext) { this.camelContext = camelContext; diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java index cb253a163..7785017c6 100644 --- a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java +++ 
b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/format/converter/standard/JsonModelDataTypeTest.java @@ -44,7 +44,19 @@ public void setup() { } @Test - void shouldMapFromStringToJsonModel() throws Exception { + void shouldMapStringToJsonModelWithModelProperty() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody("{ \"name\": \"Rajesh\", \"age\": 28}"); + dataType.setModel(Person.class.getName()); + dataType.convert(exchange); + + assertEquals(Person.class, exchange.getMessage().getBody().getClass()); + assertEquals("Rajesh", exchange.getMessage().getBody(Person.class).getName()); + } + + @Test + void shouldMapStringToJsonModelWithExchangeProperty() throws Exception { Exchange exchange = new DefaultExchange(camelContext); exchange.setProperty(JsonModelDataType.DATA_TYPE_MODEL_PROPERTY, Person.class.getName()); From 9dd325130264b2661b8fd074014a4a0fe044cbd1 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Wed, 30 Nov 2022 17:45:51 +0100 Subject: [PATCH 25/28] Fix Knative YAKS tests --- test/aws-s3/aws-s3-cloudevents.feature | 3 +- test/aws-s3/aws-s3-knative-binding.feature | 12 +++--- test/aws-s3/aws-s3-knative.feature | 1 + test/aws-s3/aws-s3-to-knative-channel.yaml | 46 ++++++++++++++++++++++ test/aws-s3/yaks-config.yaml | 1 + 5 files changed, 56 insertions(+), 7 deletions(-) create mode 100644 test/aws-s3/aws-s3-to-knative-channel.yaml diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/test/aws-s3/aws-s3-cloudevents.feature index 52ac84c54..5774b7382 100644 --- a/test/aws-s3/aws-s3-cloudevents.feature +++ b/test/aws-s3/aws-s3-cloudevents.feature @@ -1,3 +1,4 @@ +@knative Feature: AWS S3 Kamelet - cloud events data type Background: @@ -34,7 +35,7 @@ Feature: AWS S3 Kamelet - cloud events data type Scenario: Verify Kamelet source Given create Knative event consumer service event-consumer-service Given create Knative trigger event-service-trigger on service 
event-consumer-service with filter on attributes - | type | kamelet.aws.s3.source | + | type | org.apache.camel.event.aws.s3.getObject | Given Camel exchange message header CamelAwsS3Key="${aws.s3.key}" Given send Camel exchange to("aws2-s3://${aws.s3.bucketNameOrArn}?amazonS3Client=#amazonS3Client") with body: ${aws.s3.message} Then expect Knative event data: ${aws.s3.message} diff --git a/test/aws-s3/aws-s3-knative-binding.feature b/test/aws-s3/aws-s3-knative-binding.feature index e94ab7157..cf67b4c9c 100644 --- a/test/aws-s3/aws-s3-knative-binding.feature +++ b/test/aws-s3/aws-s3-knative-binding.feature @@ -1,5 +1,5 @@ @knative -Feature: AWS S3 Kamelet - binding to Knative +Feature: AWS S3 Kamelet - binding to Knative channel Background: Given Kamelet aws-s3-source is available @@ -24,13 +24,13 @@ Feature: AWS S3 Kamelet - binding to Knative Scenario: Create AWS-S3 Kamelet to InMemoryChannel binding Given variable loginfo is "Installed features" - Given load KameletBinding aws-s3-to-knative.yaml + Given load KameletBinding aws-s3-to-knative-channel.yaml Given load KameletBinding knative-channel-to-log.yaml - Then KameletBinding aws-s3-to-knative should be available + Then KameletBinding aws-s3-to-knative-channel should be available And KameletBinding knative-channel-to-log should be available - And Camel K integration aws-s3-to-knative is running + And Camel K integration aws-s3-to-knative-channel is running And Camel K integration knative-channel-to-log is running - And Camel K integration aws-s3-to-knative should print ${loginfo} + And Camel K integration aws-s3-to-knative-channel should print ${loginfo} And Camel K integration knative-channel-to-log should print ${loginfo} Then sleep 10000 ms @@ -40,7 +40,7 @@ Feature: AWS S3 Kamelet - binding to Knative Then Camel K integration knative-channel-to-log should print ${aws.s3.message} Scenario: Remove resources - Given delete KameletBinding aws-s3-to-knative + Given delete KameletBinding 
aws-s3-to-knative-channel Given delete KameletBinding knative-channel-to-log Given delete Knative broker default Given delete Knative channel messages diff --git a/test/aws-s3/aws-s3-knative.feature b/test/aws-s3/aws-s3-knative.feature index fe080fa21..dc3587977 100644 --- a/test/aws-s3/aws-s3-knative.feature +++ b/test/aws-s3/aws-s3-knative.feature @@ -1,3 +1,4 @@ +@knative Feature: AWS S3 Kamelet - Knative binding Background: diff --git a/test/aws-s3/aws-s3-to-knative-channel.yaml b/test/aws-s3/aws-s3-to-knative-channel.yaml new file mode 100644 index 000000000..5383ae613 --- /dev/null +++ b/test/aws-s3/aws-s3-to-knative-channel.yaml @@ -0,0 +1,46 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-s3-to-knative-channel +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-s3-source + properties: + bucketNameOrArn: ${aws.s3.bucketNameOrArn} + overrideEndpoint: true + uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL} + accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} + secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} + region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: log-sink + properties: + showHeaders: true + sink: + ref: + kind: InMemoryChannel + apiVersion: messaging.knative.dev/v1 + name: messages diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml index 4ef910c43..6f1a0d0d4 100644 --- a/test/aws-s3/yaks-config.yaml +++ b/test/aws-s3/yaks-config.yaml @@ -43,6 +43,7 @@ config: - aws-s3-to-log-secret-based.groovy - aws-s3-uri-binding.yaml - aws-s3-to-knative.yaml + - aws-s3-to-knative-channel.yaml - ../utils/knative-channel-to-log.yaml cucumber: tags: From 11a845067f6d59aa1fbf0e34b8bb626bff411c7e Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Wed, 30 Nov 2022 22:02:13 +0100 Subject: [PATCH 26/28] Revert existing Kamelets to not use data type converter - AWS S3 source Kamelet - AWS DDB sink Kamelet - JsonToDdbModelConverter utility and unit tests --- kamelets/aws-ddb-sink.kamelet.yaml | 25 +-- kamelets/aws-s3-source.kamelet.yaml | 19 -- .../aws/ddb/JsonToDdbModelConverter.java | 201 ++++++++++++++++++ .../aws/ddb/JsonToDdbModelConverterTest.java | 184 ++++++++++++++++ .../kamelets/aws-ddb-sink.kamelet.yaml | 25 +-- .../kamelets/aws-s3-source.kamelet.yaml | 19 -- 6 files changed, 395 insertions(+), 78 deletions(-) create mode 100644 
library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java create mode 100644 library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java diff --git a/kamelets/aws-ddb-sink.kamelet.yaml b/kamelets/aws-ddb-sink.kamelet.yaml index 87b338ee4..5b603abfc 100644 --- a/kamelets/aws-ddb-sink.kamelet.yaml +++ b/kamelets/aws-ddb-sink.kamelet.yaml @@ -97,12 +97,6 @@ spec: x-descriptors: - 'urn:alm:descriptor:com.tectonic.ui:checkbox' default: false - inputFormat: - title: Input Type - description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type. - type: string - default: json - example: json types: in: mediaType: application/json @@ -113,26 +107,17 @@ spec: - "camel:aws2-ddb" - "camel:kamelet" template: - beans: - - name: dataTypeRegistry - type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" - - name: inputTypeProcessor - type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" - property: - - key: scheme - value: 'aws2-ddb' - - key: format - value: '{{inputFormat}}' - - key: registry - value: '#bean:{{dataTypeRegistry}}' from: uri: "kamelet:source" steps: - set-property: name: operation constant: "{{operation}}" - - process: - ref: "{{inputTypeProcessor}}" + - unmarshal: + json: + library: Jackson + unmarshalType: com.fasterxml.jackson.databind.JsonNode + - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter" - to: uri: "aws2-ddb:{{table}}" parameters: diff --git a/kamelets/aws-s3-source.kamelet.yaml b/kamelets/aws-s3-source.kamelet.yaml index d937f6e5a..6ab2bca41 100644 --- a/kamelets/aws-s3-source.kamelet.yaml +++ b/kamelets/aws-s3-source.kamelet.yaml @@ -107,12 +107,6 @@ spec: description: The number of milliseconds before the next poll of the selected bucket. 
type: integer default: 500 - outputFormat: - title: Output Type - description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type. - type: string - default: binary - example: binary dependencies: - "camel:core" - "camel:aws2-s3" @@ -120,17 +114,6 @@ spec: - "camel:kamelet" template: beans: - - name: dataTypeRegistry - type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" - - name: outputTypeProcessor - type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" - property: - - key: scheme - value: 'aws2-s3' - - key: format - value: '{{outputFormat}}' - - key: registry - value: '#bean:{{dataTypeRegistry}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: @@ -160,6 +143,4 @@ spec: steps: - process: ref: "{{renameHeaders}}" - - process: - ref: "{{outputTypeProcessor}}" - to: "kamelet:sink" diff --git a/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java new file mode 100644 index 000000000..2a203ed03 --- /dev/null +++ b/library/camel-kamelets-utils/src/main/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverter.java @@ -0,0 +1,201 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.kamelets.utils.transform.aws.ddb; + +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.Exchange; +import org.apache.camel.ExchangeProperty; +import org.apache.camel.InvalidPayloadException; +import org.apache.camel.component.aws2.ddb.Ddb2Constants; +import org.apache.camel.component.aws2.ddb.Ddb2Operations; +import software.amazon.awssdk.services.dynamodb.model.AttributeAction; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate; +import software.amazon.awssdk.services.dynamodb.model.ReturnValue; + +/** + * Maps Json body to DynamoDB attribute value map and sets the attribute map as Camel DynamoDB header entries. + * + * Json property names map to attribute keys and Json property values map to attribute values. + * + * During mapping the Json property types resolve to the respective attribute types ({@code String, StringSet, Boolean, Number, NumberSet, Map, Null}). + * Primitive typed arrays in Json get mapped to {@code StringSet} or {@code NumberSet} attribute values. + * + * For PutItem operation the Json body defines all item attributes. 
+ * + * For DeleteItem operation the Json body defines only the primary key attributes that identify the item to delete. + * + * For UpdateItem operation the Json body defines both key attributes to identify the item to be updated and all item attributes that get updated on the item. + * + * The given Json body can use "key" and "item" as top level properties. + * Both define a Json object that will be mapped to respective attribute value maps: + *

{@code
+ * {
+ *   "key": {},
+ *   "item": {}
+ * }
+ * }
+ * 
+ * The converter will extract the objects and set respective attribute value maps as header entries. + * This is a comfortable way to define different key and item attribute value maps e.g. on UpdateItem operation. + * + * In case key and item attribute value maps are identical you can omit the special top level properties completely. + * The converter will map the whole Json body as is then and use it as source for the attribute value map. + */ +public class JsonToDdbModelConverter { + + public String process(@ExchangeProperty("operation") String operation, Exchange exchange) throws InvalidPayloadException { + if (exchange.getMessage().getHeaders().containsKey(Ddb2Constants.ITEM) || + exchange.getMessage().getHeaders().containsKey(Ddb2Constants.KEY)) { + return ""; + } + + ObjectMapper mapper = new ObjectMapper(); + + JsonNode jsonBody = exchange.getMessage().getMandatoryBody(JsonNode.class); + + JsonNode key = jsonBody.get("key"); + JsonNode item = jsonBody.get("item"); + + Map keyProps; + if (key != null) { + keyProps = mapper.convertValue(key, new TypeReference>(){}); + } else { + keyProps = mapper.convertValue(jsonBody, new TypeReference>(){}); + } + + Map itemProps; + if (item != null) { + itemProps = mapper.convertValue(item, new TypeReference>(){}); + } else { + itemProps = keyProps; + } + + final Map keyMap = getAttributeValueMap(keyProps); + + switch (Ddb2Operations.valueOf(operation)) { + case PutItem: + exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.PutItem); + exchange.getMessage().setHeader(Ddb2Constants.ITEM, getAttributeValueMap(itemProps)); + setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_OLD.toString(), exchange); + break; + case UpdateItem: + exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.UpdateItem); + exchange.getMessage().setHeader(Ddb2Constants.KEY, keyMap); + exchange.getMessage().setHeader(Ddb2Constants.UPDATE_VALUES, getAttributeValueUpdateMap(itemProps)); + 
setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_NEW.toString(), exchange); + break; + case DeleteItem: + exchange.getMessage().setHeader(Ddb2Constants.OPERATION, Ddb2Operations.DeleteItem); + exchange.getMessage().setHeader(Ddb2Constants.KEY, keyMap); + setHeaderIfNotPresent(Ddb2Constants.RETURN_VALUES, ReturnValue.ALL_OLD.toString(), exchange); + break; + default: + throw new UnsupportedOperationException(String.format("Unsupported operation '%s'", operation)); + } + + return ""; + } + + private void setHeaderIfNotPresent(String headerName, Object value, Exchange exchange) { + exchange.getMessage().setHeader(headerName, value); + } + + private Map getAttributeValueMap(Map body) { + final Map attributeValueMap = new LinkedHashMap<>(); + + for (Map.Entry attribute : body.entrySet()) { + attributeValueMap.put(attribute.getKey(), getAttributeValue(attribute.getValue())); + } + + return attributeValueMap; + } + + private Map getAttributeValueUpdateMap(Map body) { + final Map attributeValueMap = new LinkedHashMap<>(); + + for (Map.Entry attribute : body.entrySet()) { + attributeValueMap.put(attribute.getKey(), getAttributeValueUpdate(attribute.getValue())); + } + + return attributeValueMap; + } + + private static AttributeValue getAttributeValue(Object value) { + if (value == null) { + return AttributeValue.builder().nul(true).build(); + } + + if (value instanceof String) { + return AttributeValue.builder().s(value.toString()).build(); + } + + if (value instanceof Integer) { + return AttributeValue.builder().n(value.toString()).build(); + } + + if (value instanceof Boolean) { + return AttributeValue.builder().bool((Boolean) value).build(); + } + + if (value instanceof String[]) { + return AttributeValue.builder().ss((String[]) value).build(); + } + + if (value instanceof int[]) { + return AttributeValue.builder().ns(Stream.of((int[]) value).map(Object::toString).collect(Collectors.toList())).build(); + } + + if (value instanceof List) { + List values 
= ((List) value); + + if (values.isEmpty()) { + return AttributeValue.builder().ss().build(); + } else if (values.get(0) instanceof Integer) { + return AttributeValue.builder().ns(values.stream().map(Object::toString).collect(Collectors.toList())).build(); + } else { + return AttributeValue.builder().ss(values.stream().map(Object::toString).collect(Collectors.toList())).build(); + } + } + + if (value instanceof Map) { + Map nestedAttributes = new LinkedHashMap<>(); + + for (Map.Entry nested : ((Map) value).entrySet()) { + nestedAttributes.put(nested.getKey().toString(), getAttributeValue(nested.getValue())); + } + + return AttributeValue.builder().m(nestedAttributes).build(); + } + + return AttributeValue.builder().s(value.toString()).build(); + } + + private static AttributeValueUpdate getAttributeValueUpdate(Object value) { + return AttributeValueUpdate.builder() + .action(AttributeAction.PUT) + .value(getAttributeValue(value)).build(); + } +} \ No newline at end of file diff --git a/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java new file mode 100644 index 000000000..e88dce4e3 --- /dev/null +++ b/library/camel-kamelets-utils/src/test/java/org/apache/camel/kamelets/utils/transform/aws/ddb/JsonToDdbModelConverterTest.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.camel.kamelets.utils.transform.aws.ddb; + +import java.util.Map; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.camel.Exchange; +import org.apache.camel.component.aws2.ddb.Ddb2Constants; +import org.apache.camel.component.aws2.ddb.Ddb2Operations; +import org.apache.camel.impl.DefaultCamelContext; +import org.apache.camel.support.DefaultExchange; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import software.amazon.awssdk.services.dynamodb.model.AttributeAction; +import software.amazon.awssdk.services.dynamodb.model.AttributeValue; +import software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate; +import software.amazon.awssdk.services.dynamodb.model.ReturnValue; + +class JsonToDdbModelConverterTest { + + private DefaultCamelContext camelContext; + + private final ObjectMapper mapper = new ObjectMapper(); + + private final JsonToDdbModelConverter processor = new JsonToDdbModelConverter(); + + private final String keyJson = "{" + + "\"name\": \"Rajesh Koothrappali\"" + + "}"; + + private final String itemJson = "{" + + "\"name\": \"Rajesh Koothrappali\"," + + "\"age\": 29," + + "\"super-heroes\": [\"batman\", \"spiderman\", \"wonderwoman\"]," + + "\"issues\": [5, 3, 9, 1]," + + "\"girlfriend\": null," + + "\"doctorate\": true" + + "}"; + + @BeforeEach + void setup() { + this.camelContext = new DefaultCamelContext(); + } + + @Test + @SuppressWarnings("unchecked") + void shouldMapPutItemHeaders() throws Exception { + 
Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(mapper.readTree(itemJson)); + + processor.process(Ddb2Operations.PutItem.name(), exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); + Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); + + assertAttributeValueMap(exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class)); + } + + @Test + @SuppressWarnings("unchecked") + void shouldMapUpdateItemHeaders() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + ", \"item\": " + itemJson + "}")); + + processor.process(Ddb2Operations.UpdateItem.name(), exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + Assertions.assertEquals(Ddb2Operations.UpdateItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); + Assertions.assertEquals(ReturnValue.ALL_NEW.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); + + Map attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.KEY, Map.class); + Assertions.assertEquals(1L, attributeValueMap.size()); + Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); + + assertAttributeValueUpdateMap(exchange.getMessage().getHeader(Ddb2Constants.UPDATE_VALUES, Map.class)); + } + + @Test + @SuppressWarnings("unchecked") + void shouldMapDeleteItemHeaders() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(mapper.readTree("{\"key\": " + keyJson + "}")); + + processor.process(Ddb2Operations.DeleteItem.name(), exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + Assertions.assertEquals(Ddb2Operations.DeleteItem, 
exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); + Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); + + Map attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.KEY, Map.class); + Assertions.assertEquals(1L, attributeValueMap.size()); + Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); + } + + @Test + @SuppressWarnings("unchecked") + void shouldMapNestedObjects() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(mapper.readTree("{\"user\":" + itemJson + "}")); + + processor.process(Ddb2Operations.PutItem.name(), exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + Assertions.assertEquals(Ddb2Operations.PutItem, exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); + Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); + + Map attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class); + Assertions.assertEquals(1L, attributeValueMap.size()); + + Assertions.assertEquals("AttributeValue(M={name=AttributeValue(S=Rajesh Koothrappali), " + + "age=AttributeValue(N=29), " + + "super-heroes=AttributeValue(SS=[batman, spiderman, wonderwoman]), " + + "issues=AttributeValue(NS=[5, 3, 9, 1]), " + + "girlfriend=AttributeValue(NUL=true), " + + "doctorate=AttributeValue(BOOL=true)})", attributeValueMap.get("user").toString()); + } + + @Test + @SuppressWarnings("unchecked") + void shouldMapEmptyJson() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(mapper.readTree("{}")); + + processor.process(Ddb2Operations.PutItem.name(), exchange); + + Assertions.assertTrue(exchange.getMessage().hasHeaders()); + Assertions.assertEquals(Ddb2Operations.PutItem, 
exchange.getMessage().getHeader(Ddb2Constants.OPERATION)); + Assertions.assertEquals(ReturnValue.ALL_OLD.toString(), exchange.getMessage().getHeader(Ddb2Constants.RETURN_VALUES)); + + Map attributeValueMap = exchange.getMessage().getHeader(Ddb2Constants.ITEM, Map.class); + Assertions.assertEquals(0L, attributeValueMap.size()); + } + + @Test() + void shouldFailForUnsupportedOperation() throws Exception { + Exchange exchange = new DefaultExchange(camelContext); + + exchange.getMessage().setBody(mapper.readTree("{}")); + + Assertions.assertThrows(UnsupportedOperationException.class, () -> processor.process(Ddb2Operations.BatchGetItems.name(), exchange)); + } + + private void assertAttributeValueMap(Map attributeValueMap) { + Assertions.assertEquals(6L, attributeValueMap.size()); + Assertions.assertEquals(AttributeValue.builder().s("Rajesh Koothrappali").build(), attributeValueMap.get("name")); + Assertions.assertEquals(AttributeValue.builder().n("29").build(), attributeValueMap.get("age")); + Assertions.assertEquals(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build(), attributeValueMap.get("super-heroes")); + Assertions.assertEquals(AttributeValue.builder().ns("5", "3", "9", "1").build(), attributeValueMap.get("issues")); + Assertions.assertEquals(AttributeValue.builder().nul(true).build(), attributeValueMap.get("girlfriend")); + Assertions.assertEquals(AttributeValue.builder().bool(true).build(), attributeValueMap.get("doctorate")); + } + + private void assertAttributeValueUpdateMap(Map attributeValueMap) { + Assertions.assertEquals(6L, attributeValueMap.size()); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().s("Rajesh Koothrappali").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("name")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().n("29").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("age")); + 
Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ss("batman", "spiderman", "wonderwoman").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("super-heroes")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().ns("5", "3", "9", "1").build()).action(AttributeAction.PUT).build(), attributeValueMap.get("issues")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().nul(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("girlfriend")); + Assertions.assertEquals(AttributeValueUpdate.builder().value(AttributeValue.builder().bool(true).build()).action(AttributeAction.PUT).build(), attributeValueMap.get("doctorate")); + } +} \ No newline at end of file diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml index 87b338ee4..5b603abfc 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-sink.kamelet.yaml @@ -97,12 +97,6 @@ spec: x-descriptors: - 'urn:alm:descriptor:com.tectonic.ui:checkbox' default: false - inputFormat: - title: Input Type - description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type. 
- type: string - default: json - example: json types: in: mediaType: application/json @@ -113,26 +107,17 @@ spec: - "camel:aws2-ddb" - "camel:kamelet" template: - beans: - - name: dataTypeRegistry - type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" - - name: inputTypeProcessor - type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" - property: - - key: scheme - value: 'aws2-ddb' - - key: format - value: '{{inputFormat}}' - - key: registry - value: '#bean:{{dataTypeRegistry}}' from: uri: "kamelet:source" steps: - set-property: name: operation constant: "{{operation}}" - - process: - ref: "{{inputTypeProcessor}}" + - unmarshal: + json: + library: Jackson + unmarshalType: com.fasterxml.jackson.databind.JsonNode + - bean: "org.apache.camel.kamelets.utils.transform.aws.ddb.JsonToDdbModelConverter" - to: uri: "aws2-ddb:{{table}}" parameters: diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml index d937f6e5a..6ab2bca41 100644 --- a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-source.kamelet.yaml @@ -107,12 +107,6 @@ spec: description: The number of milliseconds before the next poll of the selected bucket. type: integer default: 500 - outputFormat: - title: Output Type - description: Choose the output type for this Kamelet. The Kamelet supports different output types and performs automatic message conversion according to this data type. 
- type: string - default: binary - example: binary dependencies: - "camel:core" - "camel:aws2-s3" @@ -120,17 +114,6 @@ spec: - "camel:kamelet" template: beans: - - name: dataTypeRegistry - type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" - - name: outputTypeProcessor - type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" - property: - - key: scheme - value: 'aws2-s3' - - key: format - value: '{{outputFormat}}' - - key: registry - value: '#bean:{{dataTypeRegistry}}' - name: renameHeaders type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" property: @@ -160,6 +143,4 @@ spec: steps: - process: ref: "{{renameHeaders}}" - - process: - ref: "{{outputTypeProcessor}}" - to: "kamelet:sink" From c8e3f160c39a281a8514d8c7a535f171c3f9cbef Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Wed, 30 Nov 2022 22:19:41 +0100 Subject: [PATCH 27/28] Add experimental Kamelets using data type converter API --- .github/workflows/yaks-tests.yaml | 6 + experimental/aws-ddb-sink.exp.kamelet.yaml | 146 ++++++++++++++++ experimental/aws-s3-source.exp.kamelet.yaml | 165 ++++++++++++++++++ .../test/aws-ddb-sink/amazonDDBClient.groovy | 53 ++++++ .../aws-ddb-sink/aws-ddb-sink-binding.yaml | 50 ++++++ .../aws-ddb-sink-deleteItem.feature | 65 +++++++ .../aws-ddb-sink/aws-ddb-sink-putItem.feature | 58 ++++++ .../aws-ddb-sink-updateItem.feature | 68 ++++++++ experimental/test/aws-ddb-sink/putItem.groovy | 30 ++++ .../test/aws-ddb-sink/verifyItems.groovy | 18 ++ .../test/aws-ddb-sink/yaks-config.yaml | 62 +++++++ .../test/aws-s3/amazonS3Client.groovy | 36 ++++ .../test}/aws-s3/aws-s3-cloudevents.feature | 2 + .../test}/aws-s3/aws-s3-knative.feature | 2 + .../test}/aws-s3/aws-s3-to-knative.yaml | 2 +- experimental/test/aws-s3/yaks-config.yaml | 69 ++++++++ test/aws-s3/yaks-config.yaml | 1 - 17 files changed, 831 insertions(+), 2 deletions(-) create mode 100644 experimental/aws-ddb-sink.exp.kamelet.yaml create mode 100644 
experimental/aws-s3-source.exp.kamelet.yaml create mode 100644 experimental/test/aws-ddb-sink/amazonDDBClient.groovy create mode 100644 experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml create mode 100644 experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature create mode 100644 experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature create mode 100644 experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature create mode 100644 experimental/test/aws-ddb-sink/putItem.groovy create mode 100644 experimental/test/aws-ddb-sink/verifyItems.groovy create mode 100644 experimental/test/aws-ddb-sink/yaks-config.yaml create mode 100644 experimental/test/aws-s3/amazonS3Client.groovy rename {test => experimental/test}/aws-s3/aws-s3-cloudevents.feature (96%) rename {test => experimental/test}/aws-s3/aws-s3-knative.feature (96%) rename {test => experimental/test}/aws-s3/aws-s3-to-knative.yaml (97%) create mode 100644 experimental/test/aws-s3/yaks-config.yaml diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index 73dcec77c..7f168ca25 100644 --- a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -65,6 +65,7 @@ jobs: # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + + find experimental -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + - name: Get Camel K CLI run: | curl --fail -L --silent https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz @@ -121,6 +122,11 @@ jobs: yaks run 
test/earthquake-source $YAKS_RUN_OPTIONS yaks run test/rest-openapi-sink $YAKS_RUN_OPTIONS yaks run test/kafka $YAKS_RUN_OPTIONS + - name: YAKS Tests on experimental Kamelets + run: | + echo "Running tests for experimental Kamelets" + yaks run experimental/test/aws-ddb-sink $YAKS_RUN_OPTIONS + yaks run experimental/test/aws-s3 $YAKS_RUN_OPTIONS - name: YAKS Report if: failure() run: | diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml b/experimental/aws-ddb-sink.exp.kamelet.yaml new file mode 100644 index 000000000..e19185fa0 --- /dev/null +++ b/experimental/aws-ddb-sink.exp.kamelet.yaml @@ -0,0 +1,146 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: Kamelet +metadata: + name: aws-ddb-sink-experimental + annotations: + camel.apache.org/kamelet.support.level: "Experimental" + camel.apache.org/catalog.version: "main-SNAPSHOT" + camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNMzkuNTA2IDEzLjY2N2gyMC4zMjF2NzEuOTk5SDM5LjUwNnpNNzQuMTMxIDY3LjU2NFY3OC41Mmw3LjQ4LTguNzY0di03LjYwMmwtNy40MzcgNS4zOTd6TTc0LjEzMSA2Mi45MzZsLjA0My0uMDEgNy40MzctNHYtNy42NDlsLTcuNDguNjg4ek03NC4xNzQgMzYuNDI5bC0uMDQzLS4wMVY0Ny4zNWw3LjQ4LjY5OXYtNy42NDV6Ii8+PHBhdGggZmlsbD0iIzFBNDc2RiIgZD0iTTgxLjYxMSA0OC4wNDlsLTcuNDgtLjY5OS0xNC4zMDMtLjU3MkgzOS41MDZsLTE0LjMwMy41NzJWMzYuNDQzbC0uMDE1LjAwOC4wMTUtLjAzMiAxNC4zMDMtMy4zMTRINTkuODI4bDE0LjMwMyAzLjMxNCA1LjI1OCAyLjc5NXYtMS43OTdsMi4yMjItLjI0My03LjQ4LTUuNDEtMTQuMzAzLTQuNDMySDM5LjUwNmwtMTQuMzAzIDQuNDMyVjIwLjgwN2wtNy40OCA4Ljc2M3Y3LjY1M2wuMDU4LS4wNDIgMi4xNjQuMjM2djEuODM0bC0yLjIyMiAxLjE4OXY3LjYxNWwuMDU4LS4wMDYgMi4xNjQuMDMydjMuMTk2bC0xLjg2Ny4wMjgtLjM1NS0uMDM0djcuNjE4bDIuMjIyIDEuMTk1djEuODU1bC0yLjEyOS4yMzUtLjA5My0uMDd2Ny42NTJsNy40OCA4Ljc2NFY2Ny41NjRsMTQuMzAzIDQuNDMySDU5LjgyOGwxNC4zNDUtNC40NDUgNy40MzgtNS4zNjctMi4yMjItLjI0NXYtMS44MThsLTUuMjE2IDIuODA1LTE0LjM0NSAzLjI5NXYuMDA0SDM5LjUwNnYtLjAwNGwtMTQuMzQ4LTMuMjk1LS4wMjUtLjA1MS4wNy4wMzdWNTEuOTY1bDE0LjMwMy41N3YuMDE0SDU5LjgyOHYtLjAxNGwxNC4zMDMtLjU3IDcuNDgtLjY1Ni0yLjIyMi0uMDMydi0zLjE5NnoiLz48L3N2Zz4=" + camel.apache.org/provider: "Apache Software Foundation" + camel.apache.org/kamelet.group: "AWS DynamoDB Streams" + 
labels: + camel.apache.org/kamelet.type: "sink" +spec: + definition: + title: "AWS DynamoDB Sink" + description: |- + Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table. + + The basic authentication method for the AWS DynamoDB service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider. + + If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method. + + This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or updates an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + required: + - table + - region + type: object + properties: + table: + title: Table + description: The name of the DynamoDB table. + type: string + accessKey: + title: Access Key + description: The access key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + secretKey: + title: Secret Key + description: The secret key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + region: + title: AWS Region + description: The AWS region to access.
+ type: string + enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "aws-cn-global", "aws-us-gov-global", "aws-iso-global", "aws-iso-b-global"] + operation: + title: Operation + description: "The operation to perform. The options are PutItem, UpdateItem, or DeleteItem." + type: string + default: PutItem + example: PutItem + writeCapacity: + title: Write Capacity + description: The provisioned throughput to reserve for writing resources to your table. + type: integer + default: 1 + useDefaultCredentialsProvider: + title: Default Credentials Provider + description: If true, the DynamoDB client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key). + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + uriEndpointOverride: + title: Overwrite Endpoint URI + description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option. + type: string + overrideEndpoint: + title: Endpoint Overwrite + description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + inputFormat: + title: Input Type + description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type. 
+ type: string + default: json + example: json + types: + in: + mediaType: application/json + dependencies: + - github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT + - "camel:core" + - "camel:jackson" + - "camel:aws2-ddb" + - "camel:kamelet" + template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: inputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-ddb' + - key: format + value: '{{inputFormat}}' + - key: registry + value: '#bean:{{dataTypeRegistry}}' + from: + uri: "kamelet:source" + steps: + - set-property: + name: operation + constant: "{{operation}}" + - process: + ref: "{{inputTypeProcessor}}" + - to: + uri: "aws2-ddb:{{table}}" + parameters: + secretKey: "{{?secretKey}}" + accessKey: "{{?accessKey}}" + region: "{{region}}" + operation: "{{operation}}" + writeCapacity: "{{?writeCapacity}}" + useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}" + uriEndpointOverride: "{{?uriEndpointOverride}}" + overrideEndpoint: "{{overrideEndpoint}}" diff --git a/experimental/aws-s3-source.exp.kamelet.yaml b/experimental/aws-s3-source.exp.kamelet.yaml new file mode 100644 index 000000000..7a8d8fe58 --- /dev/null +++ b/experimental/aws-s3-source.exp.kamelet.yaml @@ -0,0 +1,165 @@ +apiVersion: camel.apache.org/v1alpha1 +kind: Kamelet +metadata: + name: aws-s3-source-experimental + annotations: + camel.apache.org/kamelet.support.level: "Experimental" + camel.apache.org/catalog.version: "main-SNAPSHOT" + camel.apache.org/kamelet.icon: 
"data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAyNDguMiAzMDAiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxzdHlsZT4uc3QyOHtmaWxsOiM4YzMxMjN9LnN0Mjl7ZmlsbDojZTA1MjQzfTwvc3R5bGU+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik0yMCA1Mi4xTDAgNjJ2MTc1LjVsMjAgOS45LjEtLjFWNTIuMmwtLjEtLjEiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNyAyMjJMMjAgMjQ3LjVWNTIuMUwxMjcgNzd2MTQ1Ii8+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik03OC43IDE4Mi4xbDQ1LjQgNS44LjMtLjcuMy03NC40LS41LS42LTQ1LjQgNS43LS4xIDY0LjIiLz48cGF0aCBjbGFzcz0ic3QyOCIgZD0iTTEyNC4xIDIyMi4zbDEwNC4xIDI1LjIuMi0uM1Y1Mi4xbC0uMi0uMi0xMDQuMSAyNS40djE0NSIvPjxwYXRoIGNsYXNzPSJzdDI5IiBkPSJNMTY5LjUgMTgyLjFsLTQ1LjQgNS44di03NS43bDQ1LjQgNS43djY0LjIiLz48cGF0aCBkPSJNMTY5LjUgODYuOWwtNDUuNCA4LjMtNDUuNC04LjNMMTI0IDc1bDQ1LjUgMTEuOSIgZmlsbD0iIzVlMWYxOCIvPjxwYXRoIGQ9Ik0xNjkuNSAyMTMuMWwtNDUuNC04LjMtNDUuNCA4LjMgNDUuMyAxMi43IDQ1LjUtMTIuNyIgZmlsbD0iI2YyYjBhOSIvPjxwYXRoIGNsYXNzPSJzdDI4IiBkPSJNNzguNyA4Ni45bDQ1LjQtMTEuMi40LS4xVi4zbC0uNC0uMy00NS40IDIyLjd2NjQuMiIvPjxwYXRoIGNsYXNzPSJzdDI5IiBkPSJNMTY5LjUgODYuOWwtNDUuNC0xMS4yVjBsNDUuNCAyMi43djY0LjIiLz48cGF0aCBjbGFzcz0ic3QyOCIgZD0iTTEyNC4xIDMwMGwtNDUuNC0yMi43di02NC4ybDQ1LjQgMTEuMi43LjgtLjIgNzMuNi0uNSAxLjMiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNC4xIDMwMGw0NS40LTIyLjd2LTY0LjJsLTQ1LjQgMTEuMlYzMDBNMjI4LjIgNTIuMWwyMCAxMHYxNzUuNWwtMjAgMTBWNTIuMSIvPjwvc3ZnPg==" + camel.apache.org/provider: "Apache Software Foundation" + camel.apache.org/kamelet.group: "AWS S3" + labels: + camel.apache.org/kamelet.type: "source" +spec: + definition: + title: "AWS S3 Source" + description: |- + Receive data from an Amazon S3 Bucket. + + The basic authentication method for the S3 service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider. + + If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method. 
+ + Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name + required: + - bucketNameOrArn + - region + type: object + properties: + bucketNameOrArn: + title: Bucket Name + description: The S3 Bucket name or Amazon Resource Name (ARN). + type: string + deleteAfterRead: + title: Auto-delete Objects + description: Specifies to delete objects after consuming them. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: true + accessKey: + title: Access Key + description: The access key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + secretKey: + title: Secret Key + description: The secret key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + region: + title: AWS Region + description: The AWS region to access. + type: string + enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "aws-cn-global", "aws-us-gov-global", "aws-iso-global", "aws-iso-b-global"] + autoCreateBucket: + title: Autocreate Bucket + description: Specifies to automatically create the S3 bucket. 
+ type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + includeBody: + title: Include Body + description: If true, the exchange is consumed and put into the body and closed. If false, the S3Object stream is put raw into the body and the headers are set with the S3 object metadata. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: true + prefix: + title: Prefix + description: The AWS S3 bucket prefix to consider while searching. + type: string + example: 'folder/' + ignoreBody: + title: Ignore Body + description: If true, the S3 Object body is ignored. Setting this to true overrides any behavior defined by the `includeBody` option. If false, the S3 object is put in the body. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + useDefaultCredentialsProvider: + title: Default Credentials Provider + description: If true, the S3 client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key). + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + uriEndpointOverride: + title: Overwrite Endpoint URI + description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option. + type: string + overrideEndpoint: + title: Endpoint Overwrite + description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + delay: + title: Delay + description: The number of milliseconds before the next poll of the selected bucket. + type: integer + default: 500 + outputFormat: + title: Output Type + description: Choose the output type for this Kamelet. 
The Kamelet supports different output types and performs automatic message conversion according to this data type. + type: string + default: binary + example: binary + dependencies: + - "camel:core" + - "camel:aws2-s3" + - "github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT" + - "camel:kamelet" + template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: outputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-s3' + - key: format + value: '{{outputFormat}}' + - key: registry + value: '#bean:{{dataTypeRegistry}}' + - name: renameHeaders + type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" + property: + - key: prefix + value: 'CamelAwsS3' + - key: renamingPrefix + value: 'aws.s3.' + - key: mode + value: 'filtering' + - key: selectedHeaders + value: 'CamelAwsS3Key,CamelAwsS3BucketName' + from: + uri: "aws2-s3:{{bucketNameOrArn}}" + parameters: + autoCreateBucket: "{{autoCreateBucket}}" + secretKey: "{{?secretKey}}" + accessKey: "{{?accessKey}}" + region: "{{region}}" + includeBody: "{{includeBody}}" + ignoreBody: "{{ignoreBody}}" + deleteAfterRead: "{{deleteAfterRead}}" + prefix: "{{?prefix}}" + useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}" + uriEndpointOverride: "{{?uriEndpointOverride}}" + overrideEndpoint: "{{overrideEndpoint}}" + delay: "{{delay}}" + steps: + - process: + ref: "{{renameHeaders}}" + - process: + ref: "{{outputTypeProcessor}}" + - to: "kamelet:sink" diff --git a/experimental/test/aws-ddb-sink/amazonDDBClient.groovy b/experimental/test/aws-ddb-sink/amazonDDBClient.groovy new file mode 100644 index 000000000..dc0b2a8bc --- /dev/null +++ b/experimental/test/aws-ddb-sink/amazonDDBClient.groovy @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider +import software.amazon.awssdk.regions.Region +import software.amazon.awssdk.services.dynamodb.DynamoDbClient +import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition +import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement +import software.amazon.awssdk.services.dynamodb.model.KeyType +import software.amazon.awssdk.services.dynamodb.model.ProvisionedThroughput +import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType + +DynamoDbClient amazonDDBClient = DynamoDbClient + .builder() + .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_DYNAMODB_URL}")) + .credentialsProvider(StaticCredentialsProvider.create( + AwsBasicCredentials.create( + "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}", + "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}") + )) + .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}")) + .build() + +amazonDDBClient.createTable(b -> { + b.tableName("${aws.ddb.tableName}") + b.keySchema( + KeySchemaElement.builder().attributeName("id").keyType(KeyType.HASH).build(), + ) + b.attributeDefinitions( + 
AttributeDefinition.builder().attributeName("id").attributeType(ScalarAttributeType.N).build(), + ) + b.provisionedThroughput( + ProvisionedThroughput.builder() + .readCapacityUnits(1L) + .writeCapacityUnits(1L).build()) +}) + +return amazonDDBClient diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml b/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml new file mode 100644 index 000000000..6b4b2b024 --- /dev/null +++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml @@ -0,0 +1,50 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: KameletBinding +metadata: + name: aws-ddb-sink-binding +spec: + source: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: timer-source + properties: + period: ${timer.source.period} + message: '${aws.ddb.json.data}' + steps: + - ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: log-action + properties: + showHeaders: true + sink: + ref: + kind: Kamelet + apiVersion: camel.apache.org/v1alpha1 + name: aws-ddb-sink-experimental + properties: + table: ${aws.ddb.tableName} + operation: ${aws.ddb.operation} + overrideEndpoint: true + uriEndpointOverride: ${YAKS_TESTCONTAINERS_LOCALSTACK_DYNAMODB_URL} + accessKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY} + secretKey: ${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY} + region: ${YAKS_TESTCONTAINERS_LOCALSTACK_REGION} diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature b/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature new file mode 100644 index 000000000..6c54fdc36 --- /dev/null +++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature @@ -0,0 +1,65 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- +@experimental +Feature: AWS DDB Sink - DeleteItem + + Background: + Given Kamelet aws-ddb-sink-experimental is available + Given Camel K resource polling configuration + | maxAttempts | 200 | + | delayBetweenAttempts | 2000 | + Given variables + | timer.source.period | 10000 | + | aws.ddb.operation | DeleteItem | + | aws.ddb.tableName | movies | + | aws.ddb.item.id | 1 | + | aws.ddb.item.year | 1985 | + | aws.ddb.item.title | Back to the future | + | aws.ddb.json.data | {"id": ${aws.ddb.item.id}} | + + Scenario: Start LocalStack container + Given Enable service DYNAMODB + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-DDB client + Given New global Camel context + Given load to Camel registry amazonDDBClient.groovy + + Scenario: Create item on AWS-DDB + Given run script putItem.groovy + Given variables + | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title})}] | + Then run script verifyItems.groovy + + Scenario: Create AWS-DDB Kamelet sink binding + When load KameletBinding aws-ddb-sink-binding.yaml + And KameletBinding aws-ddb-sink-binding is available + And Camel K integration aws-ddb-sink-binding is running + And Camel K integration aws-ddb-sink-binding should print Routes startup + Then sleep 10sec + + Scenario: Verify Kamelet sink + Given variables + | aws.ddb.items | [] | + Then run script verifyItems.groovy + + Scenario: Remove Camel K resources + Given delete KameletBinding aws-ddb-sink-binding + + Scenario: Stop container + Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature 
b/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature new file mode 100644 index 000000000..f117889b9 --- /dev/null +++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature @@ -0,0 +1,58 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- +@experimental +Feature: AWS DDB Sink - PutItem + + Background: + Given Kamelet aws-ddb-sink-experimental is available + Given Camel K resource polling configuration + | maxAttempts | 200 | + | delayBetweenAttempts | 2000 | + Given variables + | timer.source.period | 10000 | + | aws.ddb.operation | PutItem | + | aws.ddb.tableName | movies | + | aws.ddb.item.id | 1 | + | aws.ddb.item.year | 1977 | + | aws.ddb.item.title | Star Wars IV | + | aws.ddb.json.data | { "id":${aws.ddb.item.id}, "year":${aws.ddb.item.year}, "title":"${aws.ddb.item.title}" } | + | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title})}] | + + Scenario: Start LocalStack container + Given Enable service DYNAMODB + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-DDB client + Given New global Camel context + Given load to Camel registry amazonDDBClient.groovy + + Scenario: Create AWS-DDB Kamelet sink binding + When load KameletBinding aws-ddb-sink-binding.yaml + And KameletBinding aws-ddb-sink-binding is available + And Camel K integration aws-ddb-sink-binding is running + And Camel K integration aws-ddb-sink-binding should print Routes startup + Then sleep 10sec + + Scenario: Verify Kamelet sink + Then run script verifyItems.groovy + + Scenario: Remove Camel K resources + Given delete KameletBinding aws-ddb-sink-binding + + Scenario: Stop container + Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature b/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature new file mode 100644 index 000000000..215adbe21 --- /dev/null +++ b/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature @@ -0,0 +1,68 @@ +# 
--------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- +@experimental +Feature: AWS DDB Sink - UpdateItem + + Background: + Given Kamelet aws-ddb-sink-experimental is available + Given Camel K resource polling configuration + | maxAttempts | 200 | + | delayBetweenAttempts | 2000 | + Given variables + | timer.source.period | 10000 | + | aws.ddb.operation | UpdateItem | + | aws.ddb.tableName | movies | + | aws.ddb.item.id | 1 | + | aws.ddb.item.year | 1933 | + | aws.ddb.item.title | King Kong | + | aws.ddb.item.title.new | King Kong - Historical | + | aws.ddb.item.directors | ["Merian C. Cooper", "Ernest B. 
Schoedsack"] | + | aws.ddb.json.data | { "key": {"id": ${aws.ddb.item.id}}, "item": {"title": "${aws.ddb.item.title.new}", "year": ${aws.ddb.item.year}, "directors": ${aws.ddb.item.directors}} } | + + Scenario: Start LocalStack container + Given Enable service DYNAMODB + Given start LocalStack container + And log 'Started LocalStack container: ${YAKS_TESTCONTAINERS_LOCALSTACK_CONTAINER_NAME}' + + Scenario: Create AWS-DDB client + Given New global Camel context + Given load to Camel registry amazonDDBClient.groovy + + Scenario: Create item on AWS-DDB + Given run script putItem.groovy + Given variables + | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title})}] | + Then run script verifyItems.groovy + + Scenario: Create AWS-DDB Kamelet sink binding + When load KameletBinding aws-ddb-sink-binding.yaml + And KameletBinding aws-ddb-sink-binding is available + And Camel K integration aws-ddb-sink-binding is running + And Camel K integration aws-ddb-sink-binding should print Routes startup + Then sleep 10sec + + Scenario: Verify Kamelet sink + Given variables + | aws.ddb.item.directors | [Ernest B. Schoedsack, Merian C. Cooper] | + | aws.ddb.items | [{year=AttributeValue(N=${aws.ddb.item.year}), directors=AttributeValue(SS=${aws.ddb.item.directors}), id=AttributeValue(N=${aws.ddb.item.id}), title=AttributeValue(S=${aws.ddb.item.title.new})}] | + Then run script verifyItems.groovy + + Scenario: Remove Camel K resources + Given delete KameletBinding aws-ddb-sink-binding + + Scenario: Stop container + Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/putItem.groovy b/experimental/test/aws-ddb-sink/putItem.groovy new file mode 100644 index 000000000..fd482f903 --- /dev/null +++ b/experimental/test/aws-ddb-sink/putItem.groovy @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import software.amazon.awssdk.services.dynamodb.model.AttributeValue +import software.amazon.awssdk.services.dynamodb.model.ReturnValue + +Map item = new HashMap<>() +item.put("id", AttributeValue.builder().n("${aws.ddb.item.id}").build()) +item.put("year", AttributeValue.builder().n("${aws.ddb.item.year}").build()) +item.put("title", AttributeValue.builder().s("${aws.ddb.item.title}").build()) + +amazonDDBClient.putItem(b -> { + b.tableName("${aws.ddb.tableName}") + b.item(item) + b.returnValues(ReturnValue.ALL_OLD) +}) diff --git a/experimental/test/aws-ddb-sink/verifyItems.groovy b/experimental/test/aws-ddb-sink/verifyItems.groovy new file mode 100644 index 000000000..b6e9d27ce --- /dev/null +++ b/experimental/test/aws-ddb-sink/verifyItems.groovy @@ -0,0 +1,18 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +assert "${aws.ddb.items}".equals(amazonDDBClient.scan(b -> b.tableName("${aws.ddb.tableName}"))?.items()?.toString()) diff --git a/experimental/test/aws-ddb-sink/yaks-config.yaml b/experimental/test/aws-ddb-sink/yaks-config.yaml new file mode 100644 index 000000000..51cf3b527 --- /dev/null +++ b/experimental/test/aws-ddb-sink/yaks-config.yaml @@ -0,0 +1,62 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +config: + namespace: + temporary: false + runtime: + testcontainers: + enabled: true + env: + - name: YAKS_CAMEL_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_CAMELK_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KAMELETS_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_TESTCONTAINERS_AUTO_REMOVE_RESOURCES + value: false + - name: CITRUS_TYPE_CONVERTER + value: camel + resources: + - putItem.groovy + - verifyItems.groovy + - amazonDDBClient.groovy + - aws-ddb-sink-binding.yaml + cucumber: + tags: + - "not @ignored" + settings: + dependencies: + - groupId: com.amazonaws + artifactId: aws-java-sdk-dynamodb + version: "@aws-java-sdk.version@" + - groupId: org.apache.camel + artifactId: camel-aws2-ddb + version: "@camel.version@" + - groupId: org.apache.camel + artifactId: camel-jackson + version: "@camel.version@" + dump: + enabled: true + failedOnly: true + includes: + - app=camel-k +pre: + - name: Install experimental Kamelets + run: | + kubectl apply -f ../../aws-ddb-sink.exp.kamelet.yaml -n $YAKS_NAMESPACE diff --git a/experimental/test/aws-s3/amazonS3Client.groovy b/experimental/test/aws-s3/amazonS3Client.groovy new file mode 100644 index 000000000..5c3ff8a01 --- /dev/null +++ b/experimental/test/aws-s3/amazonS3Client.groovy @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import software.amazon.awssdk.auth.credentials.AwsBasicCredentials +import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider +import software.amazon.awssdk.regions.Region +import software.amazon.awssdk.services.s3.S3Client + +S3Client s3 = S3Client + .builder() + .endpointOverride(URI.create("${YAKS_TESTCONTAINERS_LOCALSTACK_S3_URL}")) + .credentialsProvider(StaticCredentialsProvider.create( + AwsBasicCredentials.create( + "${YAKS_TESTCONTAINERS_LOCALSTACK_ACCESS_KEY}", + "${YAKS_TESTCONTAINERS_LOCALSTACK_SECRET_KEY}") + )) + .region(Region.of("${YAKS_TESTCONTAINERS_LOCALSTACK_REGION}")) + .build() + +s3.createBucket(b -> b.bucket("${aws.s3.bucketNameOrArn}")) + +return s3 diff --git a/test/aws-s3/aws-s3-cloudevents.feature b/experimental/test/aws-s3/aws-s3-cloudevents.feature similarity index 96% rename from test/aws-s3/aws-s3-cloudevents.feature rename to experimental/test/aws-s3/aws-s3-cloudevents.feature index 5774b7382..6f5513fc2 100644 --- a/test/aws-s3/aws-s3-cloudevents.feature +++ b/experimental/test/aws-s3/aws-s3-cloudevents.feature @@ -1,7 +1,9 @@ @knative +@experimental Feature: AWS S3 Kamelet - cloud events data type Background: + Given Kamelet aws-s3-source-experimental is available Given Knative event consumer timeout is 20000 ms Given Camel K resource polling configuration | maxAttempts | 200 | diff --git a/test/aws-s3/aws-s3-knative.feature b/experimental/test/aws-s3/aws-s3-knative.feature similarity index 96% rename from test/aws-s3/aws-s3-knative.feature rename to 
experimental/test/aws-s3/aws-s3-knative.feature index dc3587977..8a6512a9c 100644 --- a/test/aws-s3/aws-s3-knative.feature +++ b/experimental/test/aws-s3/aws-s3-knative.feature @@ -1,7 +1,9 @@ @knative +@experimental Feature: AWS S3 Kamelet - Knative binding Background: + Given Kamelet aws-s3-source-experimental is available Given Knative event consumer timeout is 20000 ms Given Camel K resource polling configuration | maxAttempts | 200 | diff --git a/test/aws-s3/aws-s3-to-knative.yaml b/experimental/test/aws-s3/aws-s3-to-knative.yaml similarity index 97% rename from test/aws-s3/aws-s3-to-knative.yaml rename to experimental/test/aws-s3/aws-s3-to-knative.yaml index e99ee20f1..afa1b5725 100644 --- a/test/aws-s3/aws-s3-to-knative.yaml +++ b/experimental/test/aws-s3/aws-s3-to-knative.yaml @@ -24,7 +24,7 @@ spec: ref: kind: Kamelet apiVersion: camel.apache.org/v1alpha1 - name: aws-s3-source + name: aws-s3-source-experimental properties: bucketNameOrArn: ${aws.s3.bucketNameOrArn} overrideEndpoint: true diff --git a/experimental/test/aws-s3/yaks-config.yaml b/experimental/test/aws-s3/yaks-config.yaml new file mode 100644 index 000000000..6431eaf81 --- /dev/null +++ b/experimental/test/aws-s3/yaks-config.yaml @@ -0,0 +1,69 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# --------------------------------------------------------------------------- + +config: + namespace: + temporary: false + runtime: + testcontainers: + enabled: true + env: + - name: YAKS_CAMEL_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_CAMELK_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KAMELETS_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KUBERNETES_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_KNATIVE_AUTO_REMOVE_RESOURCES + value: false + - name: YAKS_TESTCONTAINERS_AUTO_REMOVE_RESOURCES + value: false + - name: CITRUS_TYPE_CONVERTER + value: camel + resources: + - amazonS3Client.groovy + - aws-s3-to-knative.yaml + cucumber: + tags: + - "not @ignored" + settings: + loggers: + - name: Logger.Message_IN + level: DEBUG + - name: Logger.Message_OUT + level: DEBUG + dependencies: + - groupId: com.amazonaws + artifactId: aws-java-sdk-kinesis + version: "@aws-java-sdk.version@" + - groupId: org.apache.camel + artifactId: camel-aws2-s3 + version: "@camel.version@" + - groupId: org.apache.camel + artifactId: camel-jackson + version: "@camel.version@" + dump: + enabled: true + failedOnly: true + includes: + - app=camel-k +pre: + - name: Install experimental Kamelets + run: | + kubectl apply -f ../../aws-s3-source.exp.kamelet.yaml -n $YAKS_NAMESPACE diff --git a/test/aws-s3/yaks-config.yaml b/test/aws-s3/yaks-config.yaml index 6f1a0d0d4..a2831684e 100644 --- a/test/aws-s3/yaks-config.yaml +++ b/test/aws-s3/yaks-config.yaml @@ -42,7 +42,6 @@ config: - aws-s3-to-log-uri-based.groovy - aws-s3-to-log-secret-based.groovy - 
aws-s3-uri-binding.yaml - - aws-s3-to-knative.yaml - aws-s3-to-knative-channel.yaml - ../utils/knative-channel-to-log.yaml cucumber: From df62f1ae10cfada833b2e3eea3b037eb2e4ccd04 Mon Sep 17 00:00:00 2001 From: Christoph Deppisch Date: Thu, 1 Dec 2022 15:50:32 +0100 Subject: [PATCH 28/28] Include experimental Kamelets in the catalog --- .github/workflows/yaks-tests.yaml | 9 +- .../aws-ddb-experimental-sink.kamelet.yaml | 10 +- .../aws-s3-experimental-source.kamelet.yaml | 8 +- .../aws-ddb-experimental-sink.kamelet.yaml | 148 ++++++++++++++++ .../aws-s3-experimental-source.kamelet.yaml | 167 ++++++++++++++++++ .../aws-ddb-sink-exp}/amazonDDBClient.groovy | 0 .../aws-ddb-sink-binding.yaml | 4 +- .../aws-ddb-sink-deleteItem.feature | 10 +- .../aws-ddb-sink-putItem.feature | 10 +- .../aws-ddb-sink-updateItem.feature | 10 +- .../aws-ddb-sink-exp}/putItem.groovy | 0 .../aws-ddb-sink-exp}/verifyItems.groovy | 0 .../aws-ddb-sink-exp}/yaks-config.yaml | 4 - .../aws-s3-exp}/amazonS3Client.groovy | 0 .../aws-s3-exp}/aws-s3-cloudevents.feature | 10 +- .../aws-s3-exp}/aws-s3-knative.feature | 10 +- .../aws-s3-exp}/aws-s3-to-knative.yaml | 4 +- .../experimental/aws-s3-exp}/yaks-config.yaml | 4 - 18 files changed, 359 insertions(+), 49 deletions(-) rename experimental/aws-ddb-sink.exp.kamelet.yaml => kamelets/aws-ddb-experimental-sink.kamelet.yaml (95%) rename experimental/aws-s3-source.exp.kamelet.yaml => kamelets/aws-s3-experimental-source.kamelet.yaml (96%) create mode 100644 library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml create mode 100644 library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/amazonDDBClient.groovy (100%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-binding.yaml (95%) rename {experimental/test/aws-ddb-sink => 
test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-deleteItem.feature (87%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-putItem.feature (86%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/aws-ddb-sink-updateItem.feature (89%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/putItem.groovy (100%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/verifyItems.groovy (100%) rename {experimental/test/aws-ddb-sink => test/experimental/aws-ddb-sink-exp}/yaks-config.yaml (93%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/amazonS3Client.groovy (100%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/aws-s3-cloudevents.feature (86%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/aws-s3-knative.feature (85%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/aws-s3-to-knative.yaml (95%) rename {experimental/test/aws-s3 => test/experimental/aws-s3-exp}/yaks-config.yaml (94%) diff --git a/.github/workflows/yaks-tests.yaml b/.github/workflows/yaks-tests.yaml index 7f168ca25..a398b1a7f 100644 --- a/.github/workflows/yaks-tests.yaml +++ b/.github/workflows/yaks-tests.yaml @@ -65,7 +65,6 @@ jobs: # Overwrite JitPack coordinates in the local Kamelets so the tests can use the utility classes in this PR find kamelets -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + - find experimental -maxdepth 1 -name '*.kamelet.yaml' -exec sed -i "s/github:apache.camel-kamelets:camel-kamelets-utils:${BASE_REF}-SNAPSHOT/github:${HEAD_REPO/\//.}:camel-kamelets-utils:${HEAD_REF//\//'~'}-SNAPSHOT/g" {} + - name: Get Camel K CLI run: | curl --fail -L --silent 
https://github.com/apache/camel-k/releases/download/v${CAMEL_K_VERSION}/camel-k-client-${CAMEL_K_VERSION}-linux-64bit.tar.gz -o kamel.tar.gz @@ -110,7 +109,7 @@ jobs: yaks install --operator-image $YAKS_IMAGE_NAME:$YAKS_VERSION - name: YAKS Tests run: | - echo "Running tests" + echo "Running tests for Kamelets" yaks run test/aws-ddb-sink $YAKS_RUN_OPTIONS yaks run test/aws-s3 $YAKS_RUN_OPTIONS @@ -122,11 +121,11 @@ jobs: yaks run test/earthquake-source $YAKS_RUN_OPTIONS yaks run test/rest-openapi-sink $YAKS_RUN_OPTIONS yaks run test/kafka $YAKS_RUN_OPTIONS - - name: YAKS Tests on experimental Kamelets + - name: YAKS Tests experimental Kamelets run: | echo "Running tests for experimental Kamelets" - yaks run experimental/test/aws-ddb-sink $YAKS_RUN_OPTIONS - yaks run experimental/test/aws-s3 $YAKS_RUN_OPTIONS + yaks run test/experimental/aws-ddb-sink-exp $YAKS_RUN_OPTIONS + yaks run test/experimental/aws-s3-exp $YAKS_RUN_OPTIONS - name: YAKS Report if: failure() run: | diff --git a/experimental/aws-ddb-sink.exp.kamelet.yaml b/kamelets/aws-ddb-experimental-sink.kamelet.yaml similarity index 95% rename from experimental/aws-ddb-sink.exp.kamelet.yaml rename to kamelets/aws-ddb-experimental-sink.kamelet.yaml index e19185fa0..a98ecb446 100644 --- a/experimental/aws-ddb-sink.exp.kamelet.yaml +++ b/kamelets/aws-ddb-experimental-sink.kamelet.yaml @@ -18,9 +18,9 @@ apiVersion: camel.apache.org/v1alpha1 kind: Kamelet metadata: - name: aws-ddb-sink-experimental + name: aws-ddb-experimental-sink annotations: - camel.apache.org/kamelet.support.level: "Experiemental" + camel.apache.org/kamelet.support.level: "Experimental" camel.apache.org/catalog.version: "main-SNAPSHOT" camel.apache.org/kamelet.icon: 
"data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNMzkuNTA2IDEzLjY2N2gyMC4zMjF2NzEuOTk5SDM5LjUwNnpNNzQuMTMxIDY3LjU2NFY3OC41Mmw3LjQ4LTguNzY0di03LjYwMmwtNy40MzcgNS4zOTd6TTc0LjEzMSA2Mi45MzZsLjA0My0uMDEgNy40MzctNHYtNy42NDlsLTcuNDguNjg4ek03NC4xNzQgMzYuNDI5bC0uMDQzLS4wMVY0Ny4zNWw3LjQ4LjY5OXYtNy42NDV6Ii8+PHBhdGggZmlsbD0iIzFBNDc2RiIgZD0iTTgxLjYxMSA0OC4wNDlsLTcuNDgtLjY5OS0xNC4zMDMtLjU3MkgzOS41MDZsLTE0LjMwMy41NzJWMzYuNDQzbC0uMDE1LjAwOC4wMTUtLjAzMiAxNC4zMDMtMy4zMTRINTkuODI4bDE0LjMwMyAzLjMxNCA1LjI1OCAyLjc5NXYtMS43OTdsMi4yMjItLjI0My03LjQ4LTUuNDEtMTQuMzAzLTQuNDMySDM5LjUwNmwtMTQuMzAzIDQuNDMyVjIwLjgwN2wtNy40OCA4Ljc2M3Y3LjY1M2wuMDU4LS4wNDIgMi4xNjQuMjM2djEuODM0bC0yLjIyMiAxLjE4OXY3LjYxNWwuMDU4LS4wMDYgMi4xNjQuMDMydjMuMTk2bC0xLjg2Ny4wMjgtLjM1NS0uMDM0djcuNjE4bDIuMjIyIDEuMTk1djEuODU1bC0yLjEyOS4yMzUtLjA5My0uMDd2Ny42NTJsNy40OCA4Ljc2NFY2Ny41NjRsMTQuMzAzIDQuNDMySDU5LjgyOGwxNC4zNDUtNC40NDUgNy40MzgtNS4zNjctMi4yMjItLjI0NXYtMS44MThsLTUuMjE2IDIuODA1LTE0LjM0NSAzLjI5NXYuMDA0SDM5LjUwNnYtLjAwNGwtMTQuMzQ4LTMuMjk1LS4wMjUtLjA1MS4wNy4wMzdWNTEuOTY1bDE0LjMwMy41N3YuMDE0SDU5LjgyOHYtLjAxNGwxNC4zMDMtLjU3IDcuNDgtLjY1Ni0yLjIyMi0uMDMydi0zLjE5NnoiLz48L3N2Zz4=" camel.apache.org/provider: "Apache Software Foundation" @@ -29,7 +29,7 @@ metadata: camel.apache.org/kamelet.type: "sink" spec: definition: - title: "AWS DynamoDB Sink" + title: "AWS DynamoDB Experimental Sink" description: |- Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table. 
@@ -37,7 +37,9 @@ spec: If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method. - This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + + This Kamelet supports experimental input format to specify the data type that is given to this sink. The Kamelet will do best effort to convert the provided input type to the required input for the sink. 
required: - table - region diff --git a/experimental/aws-s3-source.exp.kamelet.yaml b/kamelets/aws-s3-experimental-source.kamelet.yaml similarity index 96% rename from experimental/aws-s3-source.exp.kamelet.yaml rename to kamelets/aws-s3-experimental-source.kamelet.yaml index 7a8d8fe58..504157c16 100644 --- a/experimental/aws-s3-source.exp.kamelet.yaml +++ b/kamelets/aws-s3-experimental-source.kamelet.yaml @@ -1,7 +1,7 @@ apiVersion: camel.apache.org/v1alpha1 kind: Kamelet metadata: - name: aws-s3-source-experimental + name: aws-s3-experimental-source annotations: camel.apache.org/kamelet.support.level: "Experimental" camel.apache.org/catalog.version: "main-SNAPSHOT" @@ -12,7 +12,7 @@ metadata: camel.apache.org/kamelet.type: "source" spec: definition: - title: "AWS S3 Source" + title: "AWS S3 Experimental Source" description: |- Receive data from an Amazon S3 Bucket. @@ -20,7 +20,9 @@ spec: If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method. - Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name + Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name. + + This Kamelet supports experimental output format to specify the data type produced by this source. Users of the Kamelet are able to choose from different output types. 
required: - bucketNameOrArn - region diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml new file mode 100644 index 000000000..a98ecb446 --- /dev/null +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-ddb-experimental-sink.kamelet.yaml @@ -0,0 +1,148 @@ +# --------------------------------------------------------------------------- +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# --------------------------------------------------------------------------- + +apiVersion: camel.apache.org/v1alpha1 +kind: Kamelet +metadata: + name: aws-ddb-experimental-sink + annotations: + camel.apache.org/kamelet.support.level: "Experimental" + camel.apache.org/catalog.version: "main-SNAPSHOT" + camel.apache.org/kamelet.icon: "data:image/svg+xml;base64,PHN2ZyBoZWlnaHQ9IjEwMCIgd2lkdGg9IjEwMCIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNNzQuMTc0IDMxLjgwN2w3LjQzNyA1LjM2N3YtNy42MDJsLTcuNDgtOC43NjV2MTAuOTU3bC4wNDMuMDE1eiIvPjxwYXRoIGZpbGw9IiM1Mjk0Q0YiIGQ9Ik01OS44MzggODUuNjY2bDE0LjI5My03LjE0NlYyMC43OTFsLTE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMjA1Qjk4IiBkPSJNMzkuNDk2IDg1LjY2NkwyNS4yMDMgNzguNTJWMjAuNzkxbDE0LjMwMy03LjEyNHoiLz48cGF0aCBmaWxsPSIjMkQ3MkI4IiBkPSJNMzkuNTA2IDEzLjY2N2gyMC4zMjF2NzEuOTk5SDM5LjUwNnpNNzQuMTMxIDY3LjU2NFY3OC41Mmw3LjQ4LTguNzY0di03LjYwMmwtNy40MzcgNS4zOTd6TTc0LjEzMSA2Mi45MzZsLjA0My0uMDEgNy40MzctNHYtNy42NDlsLTcuNDguNjg4ek03NC4xNzQgMzYuNDI5bC0uMDQzLS4wMVY0Ny4zNWw3LjQ4LjY5OXYtNy42NDV6Ii8+PHBhdGggZmlsbD0iIzFBNDc2RiIgZD0iTTgxLjYxMSA0OC4wNDlsLTcuNDgtLjY5OS0xNC4zMDMtLjU3MkgzOS41MDZsLTE0LjMwMy41NzJWMzYuNDQzbC0uMDE1LjAwOC4wMTUtLjAzMiAxNC4zMDMtMy4zMTRINTkuODI4bDE0LjMwMyAzLjMxNCA1LjI1OCAyLjc5NXYtMS43OTdsMi4yMjItLjI0My03LjQ4LTUuNDEtMTQuMzAzLTQuNDMySDM5LjUwNmwtMTQuMzAzIDQuNDMyVjIwLjgwN2wtNy40OCA4Ljc2M3Y3LjY1M2wuMDU4LS4wNDIgMi4xNjQuMjM2djEuODM0bC0yLjIyMiAxLjE4OXY3LjYxNWwuMDU4LS4wMDYgMi4xNjQuMDMydjMuMTk2bC0xLjg2Ny4wMjgtLjM1NS0uMDM0djcuNjE4bDIuMjIyIDEuMTk1djEuODU1bC0yLjEyOS4yMzUtLjA5My0uMDd2Ny42NTJsNy40OCA4Ljc2NFY2Ny41NjRsMTQuMzAzIDQuNDMySDU5LjgyOGwxNC4zNDUtNC40NDUgNy40MzgtNS4zNjctMi4yMjItLjI0NXYtMS44MThsLTUuMjE2IDIuODA1LTE0LjM0NSAzLjI5NXYuMDA0SDM5LjUwNnYtLjAwNGwtMTQuMzQ4LTMuMjk1LS4wMjUtLjA1MS4wNy4wMzdWNTEuOTY1bDE0LjMwMy41N3YuMDE0SDU5LjgyOHYtLjAxNGwxNC4zMDMtLjU3IDcuNDgtLjY1Ni0yLjIyMi0uMDMydi0zLjE5NnoiLz48L3N2Zz4=" + camel.apache.org/provider: "Apache Software Foundation" + camel.apache.org/kamelet.group: "AWS DynamoDB Streams" + 
labels: + camel.apache.org/kamelet.type: "sink" +spec: + definition: + title: "AWS DynamoDB Experimental Sink" + description: |- + Send data to Amazon DynamoDB. The sent data inserts, updates, or deletes an item on the specified AWS DynamoDB table. + + The basic authentication method for the AWS DynamoDB service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider. + + If you use the default credentials provider, the DynamoDB client loads the credentials through this provider and doesn't use the basic authentication method. + + This Kamelet expects a JSON-formatted body and it must include the primary key values that define the DynamoDB item. The mapping between the JSON fields and table attribute values is done by key. For example, for '{"username":"oscerd", "city":"Rome"}' input, the Kamelet inserts or update an item in the specified AWS DynamoDB table and sets the values for the 'username' and 'city' attributes. + + This Kamelet supports experimental input format to specify the data type that is given to this sink. The Kamelet will do best effort to convert the provided input type to the required input for the sink. + required: + - table + - region + type: object + properties: + table: + title: Table + description: The name of the DynamoDB table. + type: string + accessKey: + title: Access Key + description: The access key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + secretKey: + title: Secret Key + description: The secret key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + region: + title: AWS Region + description: The AWS region to access. 
+ type: string + enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "aws-cn-global", "aws-us-gov-global", "aws-iso-global", "aws-iso-b-global"] + operation: + title: Operation + description: "The operation to perform. The options are PutItem, UpdateItem, or DeleteItem." + type: string + default: PutItem + example: PutItem + writeCapacity: + title: Write Capacity + description: The provisioned throughput to reserve for writing resources to your table. + type: integer + default: 1 + useDefaultCredentialsProvider: + title: Default Credentials Provider + description: If true, the DynamoDB client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key). + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + uriEndpointOverride: + title: Overwrite Endpoint URI + description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option. + type: string + overrideEndpoint: + title: Endpoint Overwrite + description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + inputFormat: + title: Input Type + description: Specify the input type for this Kamelet. The Kamelet will automatically apply conversion logic in order to transform message content to this data type. 
+ type: string + default: json + example: json + types: + in: + mediaType: application/json + dependencies: + - github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT + - "camel:core" + - "camel:jackson" + - "camel:aws2-ddb" + - "camel:kamelet" + template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: inputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-ddb' + - key: format + value: '{{inputFormat}}' + - key: registry + value: '#bean:{{dataTypeRegistry}}' + from: + uri: "kamelet:source" + steps: + - set-property: + name: operation + constant: "{{operation}}" + - process: + ref: "{{inputTypeProcessor}}" + - to: + uri: "aws2-ddb:{{table}}" + parameters: + secretKey: "{{?secretKey}}" + accessKey: "{{?accessKey}}" + region: "{{region}}" + operation: "{{operation}}" + writeCapacity: "{{?writeCapacity}}" + useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}" + uriEndpointOverride: "{{?uriEndpointOverride}}" + overrideEndpoint: "{{overrideEndpoint}}" diff --git a/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml new file mode 100644 index 000000000..504157c16 --- /dev/null +++ b/library/camel-kamelets/src/main/resources/kamelets/aws-s3-experimental-source.kamelet.yaml @@ -0,0 +1,167 @@ +apiVersion: camel.apache.org/v1alpha1 +kind: Kamelet +metadata: + name: aws-s3-experimental-source + annotations: + camel.apache.org/kamelet.support.level: "Experimental" + camel.apache.org/catalog.version: "main-SNAPSHOT" + camel.apache.org/kamelet.icon: 
"data:image/svg+xml;base64,PHN2ZyB2ZXJzaW9uPSIxLjEiIGlkPSJMYXllcl8xIiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHg9IjAiIHk9IjAiIHZpZXdCb3g9IjAgMCAyNDguMiAzMDAiIHhtbDpzcGFjZT0icHJlc2VydmUiPjxzdHlsZT4uc3QyOHtmaWxsOiM4YzMxMjN9LnN0Mjl7ZmlsbDojZTA1MjQzfTwvc3R5bGU+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik0yMCA1Mi4xTDAgNjJ2MTc1LjVsMjAgOS45LjEtLjFWNTIuMmwtLjEtLjEiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNyAyMjJMMjAgMjQ3LjVWNTIuMUwxMjcgNzd2MTQ1Ii8+PHBhdGggY2xhc3M9InN0MjgiIGQ9Ik03OC43IDE4Mi4xbDQ1LjQgNS44LjMtLjcuMy03NC40LS41LS42LTQ1LjQgNS43LS4xIDY0LjIiLz48cGF0aCBjbGFzcz0ic3QyOCIgZD0iTTEyNC4xIDIyMi4zbDEwNC4xIDI1LjIuMi0uM1Y1Mi4xbC0uMi0uMi0xMDQuMSAyNS40djE0NSIvPjxwYXRoIGNsYXNzPSJzdDI5IiBkPSJNMTY5LjUgMTgyLjFsLTQ1LjQgNS44di03NS43bDQ1LjQgNS43djY0LjIiLz48cGF0aCBkPSJNMTY5LjUgODYuOWwtNDUuNCA4LjMtNDUuNC04LjNMMTI0IDc1bDQ1LjUgMTEuOSIgZmlsbD0iIzVlMWYxOCIvPjxwYXRoIGQ9Ik0xNjkuNSAyMTMuMWwtNDUuNC04LjMtNDUuNCA4LjMgNDUuMyAxMi43IDQ1LjUtMTIuNyIgZmlsbD0iI2YyYjBhOSIvPjxwYXRoIGNsYXNzPSJzdDI4IiBkPSJNNzguNyA4Ni45bDQ1LjQtMTEuMi40LS4xVi4zbC0uNC0uMy00NS40IDIyLjd2NjQuMiIvPjxwYXRoIGNsYXNzPSJzdDI5IiBkPSJNMTY5LjUgODYuOWwtNDUuNC0xMS4yVjBsNDUuNCAyMi43djY0LjIiLz48cGF0aCBjbGFzcz0ic3QyOCIgZD0iTTEyNC4xIDMwMGwtNDUuNC0yMi43di02NC4ybDQ1LjQgMTEuMi43LjgtLjIgNzMuNi0uNSAxLjMiLz48cGF0aCBjbGFzcz0ic3QyOSIgZD0iTTEyNC4xIDMwMGw0NS40LTIyLjd2LTY0LjJsLTQ1LjQgMTEuMlYzMDBNMjI4LjIgNTIuMWwyMCAxMHYxNzUuNWwtMjAgMTBWNTIuMSIvPjwvc3ZnPg==" + camel.apache.org/provider: "Apache Software Foundation" + camel.apache.org/kamelet.group: "AWS S3" + labels: + camel.apache.org/kamelet.type: "source" +spec: + definition: + title: "AWS S3 Experimental Source" + description: |- + Receive data from an Amazon S3 Bucket. + + The basic authentication method for the S3 service is to specify an access key and a secret key. These parameters are optional because the Kamelet provides a default credentials provider. + + If you use the default credentials provider, the S3 client loads the credentials through this provider and doesn't use the basic authentication method. 
+ + Two headers will be duplicated with different names for clarity at sink level, CamelAwsS3Key will be duplicated into aws.s3.key and CamelAwsS3BucketName will be duplicated in aws.s3.bucket.name. + + This Kamelet supports experimental output format to specify the data type produced by this source. Users of the Kamelet are able to choose from different output types. + required: + - bucketNameOrArn + - region + type: object + properties: + bucketNameOrArn: + title: Bucket Name + description: The S3 Bucket name or Amazon Resource Name (ARN). + type: string + deleteAfterRead: + title: Auto-delete Objects + description: Specifies to delete objects after consuming them. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: true + accessKey: + title: Access Key + description: The access key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + secretKey: + title: Secret Key + description: The secret key obtained from AWS. + type: string + format: password + x-descriptors: + - urn:alm:descriptor:com.tectonic.ui:password + - urn:camel:group:credentials + region: + title: AWS Region + description: The AWS region to access. + type: string + enum: ["ap-south-1", "eu-south-1", "us-gov-east-1", "me-central-1", "ca-central-1", "eu-central-1", "us-iso-west-1", "us-west-1", "us-west-2", "af-south-1", "eu-north-1", "eu-west-3", "eu-west-2", "eu-west-1", "ap-northeast-3", "ap-northeast-2", "ap-northeast-1", "me-south-1", "sa-east-1", "ap-east-1", "cn-north-1", "us-gov-west-1", "ap-southeast-1", "ap-southeast-2", "us-iso-east-1", "ap-southeast-3", "us-east-1", "us-east-2", "cn-northwest-1", "us-isob-east-1", "aws-global", "aws-cn-global", "aws-us-gov-global", "aws-iso-global", "aws-iso-b-global"] + autoCreateBucket: + title: Autocreate Bucket + description: Specifies to automatically create the S3 bucket. 
+ type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + includeBody: + title: Include Body + description: If true, the exchange is consumed and put into the body and closed. If false, the S3Object stream is put raw into the body and the headers are set with the S3 object metadata. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: true + prefix: + title: Prefix + description: The AWS S3 bucket prefix to consider while searching. + type: string + example: 'folder/' + ignoreBody: + title: Ignore Body + description: If true, the S3 Object body is ignored. Setting this to true overrides any behavior defined by the `includeBody` option. If false, the S3 object is put in the body. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + useDefaultCredentialsProvider: + title: Default Credentials Provider + description: If true, the S3 client loads credentials through a default credentials provider. If false, it uses the basic authentication method (access key and secret key). + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + uriEndpointOverride: + title: Overwrite Endpoint URI + description: The overriding endpoint URI. To use this option, you must also select the `overrideEndpoint` option. + type: string + overrideEndpoint: + title: Endpoint Overwrite + description: Select this option to override the endpoint URI. To use this option, you must also provide a URI for the `uriEndpointOverride` option. + type: boolean + x-descriptors: + - 'urn:alm:descriptor:com.tectonic.ui:checkbox' + default: false + delay: + title: Delay + description: The number of milliseconds before the next poll of the selected bucket. + type: integer + default: 500 + outputFormat: + title: Output Type + description: Choose the output type for this Kamelet. 
The Kamelet supports different output types and performs automatic message conversion according to this data type. + type: string + default: binary + example: binary + dependencies: + - "camel:core" + - "camel:aws2-s3" + - "github:apache.camel-kamelets:camel-kamelets-utils:main-SNAPSHOT" + - "camel:kamelet" + template: + beans: + - name: dataTypeRegistry + type: "#class:org.apache.camel.kamelets.utils.format.DefaultDataTypeRegistry" + - name: outputTypeProcessor + type: "#class:org.apache.camel.kamelets.utils.format.DataTypeProcessor" + property: + - key: scheme + value: 'aws2-s3' + - key: format + value: '{{outputFormat}}' + - key: registry + value: '#bean:{{dataTypeRegistry}}' + - name: renameHeaders + type: "#class:org.apache.camel.kamelets.utils.headers.DuplicateNamingHeaders" + property: + - key: prefix + value: 'CamelAwsS3' + - key: renamingPrefix + value: 'aws.s3.' + - key: mode + value: 'filtering' + - key: selectedHeaders + value: 'CamelAwsS3Key,CamelAwsS3BucketName' + from: + uri: "aws2-s3:{{bucketNameOrArn}}" + parameters: + autoCreateBucket: "{{autoCreateBucket}}" + secretKey: "{{?secretKey}}" + accessKey: "{{?accessKey}}" + region: "{{region}}" + includeBody: "{{includeBody}}" + ignoreBody: "{{ignoreBody}}" + deleteAfterRead: "{{deleteAfterRead}}" + prefix: "{{?prefix}}" + useDefaultCredentialsProvider: "{{useDefaultCredentialsProvider}}" + uriEndpointOverride: "{{?uriEndpointOverride}}" + overrideEndpoint: "{{overrideEndpoint}}" + delay: "{{delay}}" + steps: + - process: + ref: "{{renameHeaders}}" + - process: + ref: "{{outputTypeProcessor}}" + - to: "kamelet:sink" diff --git a/experimental/test/aws-ddb-sink/amazonDDBClient.groovy b/test/experimental/aws-ddb-sink-exp/amazonDDBClient.groovy similarity index 100% rename from experimental/test/aws-ddb-sink/amazonDDBClient.groovy rename to test/experimental/aws-ddb-sink-exp/amazonDDBClient.groovy diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml 
b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml similarity index 95% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml index 6b4b2b024..d1e5fb440 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-binding.yaml +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-binding.yaml @@ -18,7 +18,7 @@ apiVersion: camel.apache.org/v1alpha1 kind: KameletBinding metadata: - name: aws-ddb-sink-binding + name: aws-ddb-experimental-sink-binding spec: source: ref: @@ -39,7 +39,7 @@ spec: ref: kind: Kamelet apiVersion: camel.apache.org/v1alpha1 - name: aws-ddb-sink-experimental + name: aws-ddb-experimental-sink properties: table: ${aws.ddb.tableName} operation: ${aws.ddb.operation} diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature similarity index 87% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature index 6c54fdc36..d535b82f7 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-deleteItem.feature +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-deleteItem.feature @@ -18,7 +18,7 @@ Feature: AWS DDB Sink - DeleteItem Background: - Given Kamelet aws-ddb-sink-experimental is available + Given Kamelet aws-ddb-experimental-sink is available Given Camel K resource polling configuration | maxAttempts | 200 | | delayBetweenAttempts | 2000 | @@ -48,9 +48,9 @@ Feature: AWS DDB Sink - DeleteItem Scenario: Create AWS-DDB Kamelet sink binding When load KameletBinding aws-ddb-sink-binding.yaml - And KameletBinding aws-ddb-sink-binding is available - And Camel K integration aws-ddb-sink-binding is running - And Camel K integration aws-ddb-sink-binding should print Routes startup + And KameletBinding aws-ddb-experimental-sink-binding is available + And Camel K integration 
aws-ddb-experimental-sink-binding is running + And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup Then sleep 10sec Scenario: Verify Kamelet sink @@ -59,7 +59,7 @@ Feature: AWS DDB Sink - DeleteItem Then run script verifyItems.groovy Scenario: Remove Camel K resources - Given delete KameletBinding aws-ddb-sink-binding + Given delete KameletBinding aws-ddb-experimental-sink-binding Scenario: Stop container Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature similarity index 86% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature index f117889b9..637b1dab7 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-putItem.feature +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-putItem.feature @@ -18,7 +18,7 @@ Feature: AWS DDB Sink - PutItem Background: - Given Kamelet aws-ddb-sink-experimental is available + Given Kamelet aws-ddb-experimental-sink is available Given Camel K resource polling configuration | maxAttempts | 200 | | delayBetweenAttempts | 2000 | @@ -43,16 +43,16 @@ Feature: AWS DDB Sink - PutItem Scenario: Create AWS-DDB Kamelet sink binding When load KameletBinding aws-ddb-sink-binding.yaml - And KameletBinding aws-ddb-sink-binding is available - And Camel K integration aws-ddb-sink-binding is running - And Camel K integration aws-ddb-sink-binding should print Routes startup + And KameletBinding aws-ddb-experimental-sink-binding is available + And Camel K integration aws-ddb-experimental-sink-binding is running + And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup Then sleep 10sec Scenario: Verify Kamelet sink Then run script verifyItems.groovy Scenario: Remove Camel K resources - Given delete KameletBinding aws-ddb-sink-binding + Given delete KameletBinding 
aws-ddb-experimental-sink-binding Scenario: Stop container Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature similarity index 89% rename from experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature rename to test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature index 215adbe21..5a0a29c1c 100644 --- a/experimental/test/aws-ddb-sink/aws-ddb-sink-updateItem.feature +++ b/test/experimental/aws-ddb-sink-exp/aws-ddb-sink-updateItem.feature @@ -18,7 +18,7 @@ Feature: AWS DDB Sink - UpdateItem Background: - Given Kamelet aws-ddb-sink-experimental is available + Given Kamelet aws-ddb-experimental-sink is available Given Camel K resource polling configuration | maxAttempts | 200 | | delayBetweenAttempts | 2000 | @@ -50,9 +50,9 @@ Feature: AWS DDB Sink - UpdateItem Scenario: Create AWS-DDB Kamelet sink binding When load KameletBinding aws-ddb-sink-binding.yaml - And KameletBinding aws-ddb-sink-binding is available - And Camel K integration aws-ddb-sink-binding is running - And Camel K integration aws-ddb-sink-binding should print Routes startup + And KameletBinding aws-ddb-experimental-sink-binding is available + And Camel K integration aws-ddb-experimental-sink-binding is running + And Camel K integration aws-ddb-experimental-sink-binding should print Routes startup Then sleep 10sec Scenario: Verify Kamelet sink @@ -62,7 +62,7 @@ Feature: AWS DDB Sink - UpdateItem Then run script verifyItems.groovy Scenario: Remove Camel K resources - Given delete KameletBinding aws-ddb-sink-binding + Given delete KameletBinding aws-ddb-experimental-sink-binding Scenario: Stop container Given stop LocalStack container diff --git a/experimental/test/aws-ddb-sink/putItem.groovy b/test/experimental/aws-ddb-sink-exp/putItem.groovy similarity index 100% rename from experimental/test/aws-ddb-sink/putItem.groovy rename to 
test/experimental/aws-ddb-sink-exp/putItem.groovy diff --git a/experimental/test/aws-ddb-sink/verifyItems.groovy b/test/experimental/aws-ddb-sink-exp/verifyItems.groovy similarity index 100% rename from experimental/test/aws-ddb-sink/verifyItems.groovy rename to test/experimental/aws-ddb-sink-exp/verifyItems.groovy diff --git a/experimental/test/aws-ddb-sink/yaks-config.yaml b/test/experimental/aws-ddb-sink-exp/yaks-config.yaml similarity index 93% rename from experimental/test/aws-ddb-sink/yaks-config.yaml rename to test/experimental/aws-ddb-sink-exp/yaks-config.yaml index 51cf3b527..15156f088 100644 --- a/experimental/test/aws-ddb-sink/yaks-config.yaml +++ b/test/experimental/aws-ddb-sink-exp/yaks-config.yaml @@ -56,7 +56,3 @@ config: failedOnly: true includes: - app=camel-k -pre: - - name: Install experimental Kamelets - run: | - kubectl apply -f ../../aws-ddb-sink.exp.kamelet.yaml -n $YAKS_NAMESPACE diff --git a/experimental/test/aws-s3/amazonS3Client.groovy b/test/experimental/aws-s3-exp/amazonS3Client.groovy similarity index 100% rename from experimental/test/aws-s3/amazonS3Client.groovy rename to test/experimental/aws-s3-exp/amazonS3Client.groovy diff --git a/experimental/test/aws-s3/aws-s3-cloudevents.feature b/test/experimental/aws-s3-exp/aws-s3-cloudevents.feature similarity index 86% rename from experimental/test/aws-s3/aws-s3-cloudevents.feature rename to test/experimental/aws-s3-exp/aws-s3-cloudevents.feature index 6f5513fc2..2ce2d0d60 100644 --- a/experimental/test/aws-s3/aws-s3-cloudevents.feature +++ b/test/experimental/aws-s3-exp/aws-s3-cloudevents.feature @@ -3,7 +3,7 @@ Feature: AWS S3 Kamelet - cloud events data type Background: - Given Kamelet aws-s3-source-experimental is available + Given Kamelet aws-s3-experimental-source is available Given Knative event consumer timeout is 20000 ms Given Camel K resource polling configuration | maxAttempts | 200 | @@ -30,9 +30,9 @@ Feature: AWS S3 Kamelet - cloud events data type Scenario: Create AWS-S3 
Kamelet to Knative binding Given variable loginfo is "Installed features" When load KameletBinding aws-s3-to-knative.yaml - And KameletBinding aws-s3-to-knative is available - And Camel K integration aws-s3-to-knative is running - Then Camel K integration aws-s3-to-knative should print ${loginfo} + And KameletBinding aws-s3-to-knative-binding is available + And Camel K integration aws-s3-to-knative-binding is running + Then Camel K integration aws-s3-to-knative-binding should print ${loginfo} Scenario: Verify Kamelet source Given create Knative event consumer service event-consumer-service @@ -48,7 +48,7 @@ Feature: AWS S3 Kamelet - cloud events data type | id | @ignore@ | Scenario: Remove Camel K resources - Given delete KameletBinding aws-s3-to-knative + Given delete KameletBinding aws-s3-to-knative-binding Given delete Kubernetes service event-consumer-service Scenario: Stop container diff --git a/experimental/test/aws-s3/aws-s3-knative.feature b/test/experimental/aws-s3-exp/aws-s3-knative.feature similarity index 85% rename from experimental/test/aws-s3/aws-s3-knative.feature rename to test/experimental/aws-s3-exp/aws-s3-knative.feature index 8a6512a9c..bb1bebd39 100644 --- a/experimental/test/aws-s3/aws-s3-knative.feature +++ b/test/experimental/aws-s3-exp/aws-s3-knative.feature @@ -3,7 +3,7 @@ Feature: AWS S3 Kamelet - Knative binding Background: - Given Kamelet aws-s3-source-experimental is available + Given Kamelet aws-s3-experimental-source is available Given Knative event consumer timeout is 20000 ms Given Camel K resource polling configuration | maxAttempts | 200 | @@ -30,9 +30,9 @@ Feature: AWS S3 Kamelet - Knative binding Scenario: Create AWS-S3 Kamelet to Knative binding Given variable loginfo is "Installed features" When load KameletBinding aws-s3-to-knative.yaml - And KameletBinding aws-s3-to-knative is available - And Camel K integration aws-s3-to-knative is running - Then Camel K integration aws-s3-to-knative should print ${loginfo} + And 
KameletBinding aws-s3-to-knative-binding is available + And Camel K integration aws-s3-to-knative-binding is running + Then Camel K integration aws-s3-to-knative-binding should print ${loginfo} Scenario: Verify Kamelet source Given create Knative event consumer service event-consumer-service @@ -47,7 +47,7 @@ Feature: AWS S3 Kamelet - Knative binding | id | @ignore@ | Scenario: Remove Camel K resources - Given delete KameletBinding aws-s3-to-knative + Given delete KameletBinding aws-s3-to-knative-binding Given delete Kubernetes service event-consumer-service Given delete Knative broker default diff --git a/experimental/test/aws-s3/aws-s3-to-knative.yaml b/test/experimental/aws-s3-exp/aws-s3-to-knative.yaml similarity index 95% rename from experimental/test/aws-s3/aws-s3-to-knative.yaml rename to test/experimental/aws-s3-exp/aws-s3-to-knative.yaml index afa1b5725..117c33322 100644 --- a/experimental/test/aws-s3/aws-s3-to-knative.yaml +++ b/test/experimental/aws-s3-exp/aws-s3-to-knative.yaml @@ -18,13 +18,13 @@ apiVersion: camel.apache.org/v1alpha1 kind: KameletBinding metadata: - name: aws-s3-to-knative + name: aws-s3-to-knative-binding spec: source: ref: kind: Kamelet apiVersion: camel.apache.org/v1alpha1 - name: aws-s3-source-experimental + name: aws-s3-experimental-source properties: bucketNameOrArn: ${aws.s3.bucketNameOrArn} overrideEndpoint: true diff --git a/experimental/test/aws-s3/yaks-config.yaml b/test/experimental/aws-s3-exp/yaks-config.yaml similarity index 94% rename from experimental/test/aws-s3/yaks-config.yaml rename to test/experimental/aws-s3-exp/yaks-config.yaml index 6431eaf81..33d55aac4 100644 --- a/experimental/test/aws-s3/yaks-config.yaml +++ b/test/experimental/aws-s3-exp/yaks-config.yaml @@ -63,7 +63,3 @@ config: failedOnly: true includes: - app=camel-k -pre: - - name: Install experimental Kamelets - run: | - kubectl apply -f ../../aws-s3-source.exp.kamelet.yaml -n $YAKS_NAMESPACE