diff --git a/README.md b/README.md
index 9f2f014..3f452e5 100644
--- a/README.md
+++ b/README.md
@@ -66,15 +66,15 @@ For complete instructions on generating the Java and subsequently using it to se
 
 You can configure this template by passing different parameters in the Generator CLI: `-p PARAM1_NAME=PARAM1_VALUE -p PARAM2_NAME=PARAM2_VALUE`
 
-Name | Description | Required | Default
----|---|---|---
-`server` | Server must be defined in yaml and selected when using the generator | Yes | -
-`user` | User for the server to generate code for. This can also be provided as an environment variable (see below) | No | app
-`password` | Password for the server to generate code for. This can also be provided as an environment variable (see below) | No | passw0rd
-`package` | Java package name for generated code | No | com.asyncapi
-`mqTopicPrefix` | MQ topic prefix. Used for ibmmq protocols. Default will work with dev MQ instance | No | dev//
-`asyncapiFileDir` | Custom output location of the AsyncAPI file that you provided as an input | No | The root of the output directory
-
+| Name              | Description                                                                                                     | Required | Default                          |
+|-------------------|-----------------------------------------------------------------------------------------------------------------|----------|----------------------------------|
+| `server`          | Server must be defined in yaml and selected when using the generator                                           | Yes      | -                                |
+| `library`         | The library to use. Currently supported options: [`java`, `spring`]                                            | No       | java                             |
+| `user`            | User for the server to generate code for. This can also be provided as an environment variable (see below)     | No       | app                              |
+| `password`        | Password for the server to generate code for. This can also be provided as an environment variable (see below) | No       | passw0rd                         |
+| `package`         | Java package name for generated code                                                                            | No       | com.asyncapi                     |
+| `mqTopicPrefix`   | MQ topic prefix. Used for ibmmq protocols. Default will work with dev MQ instance                               | No       | dev//                            |
+| `asyncapiFileDir` | Custom output location of the AsyncAPI file that you provided as an input                                       | No       | The root of the output directory |
 
 ## Environment variables
 
diff --git a/components/Connection/NoOpConnection.js b/components/Connection/NoOpConnection.js
new file mode 100644
index 0000000..bc74c51
--- /dev/null
+++ b/components/Connection/NoOpConnection.js
@@ -0,0 +1,19 @@
+/*
+* (c) Copyright IBM Corporation 2021
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+export function Connection() {
+  return null;
+}
diff --git a/components/Connection/index.js b/components/Connection/index.js
index 1631c6f..2dc38c7 100644
--- a/components/Connection/index.js
+++ b/components/Connection/index.js
@@ -1,23 +1,31 @@
 import { Connection as KafkaConnection } from './KafkaConnection';
 import { Connection as MQConnection } from './MQConnection';
+import { Connection as NoOpConnection } from './NoOpConnection';
 
 const connectionModuleMap = [
   {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: ['java'],
     module: MQConnection
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: ['java'],
     module: KafkaConnection
-  }
+  },
+  {
+    protocols: ['ibmmq', 'ibmmq-secure', 'kafka', 'kafka-secure'],
+    libraries: ['spring'],
+    module: NoOpConnection
+  },
 ];
 
 export default function({ asyncapi, params }) {
   const server = asyncapi.allServers().get(params.server);
   const protocol = server.protocol();
-  const foundModule = connectionModuleMap.find(item => item.protocols.includes(protocol));
+  const foundModule = connectionModuleMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(params.library));
   if (!foundModule) {
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${params.library}`);
   }
   return foundModule.module();
 }
diff --git a/components/ConnectionHelper/NoOpConnectionHelper.js b/components/ConnectionHelper/NoOpConnectionHelper.js
new file mode 100644
index 0000000..6fe7862
--- /dev/null
+++ b/components/ConnectionHelper/NoOpConnectionHelper.js
@@ -0,0 +1,19 @@
+/*
+* (c) Copyright IBM Corporation 2021
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+export function ConnectionHelper({ params }) {
+  return null;
+}
diff --git a/components/ConnectionHelper/index.js b/components/ConnectionHelper/index.js
index 70939a2..18d1e68 100644
--- a/components/ConnectionHelper/index.js
+++ b/components/ConnectionHelper/index.js
@@ -1,23 +1,31 @@
 import { ConnectionHelper as KafkaConnectionHelper } from './KafkaConnectionHelper';
 import { ConnectionHelper as MQConnectionHelper } from './MQConnectionHelper';
+import { ConnectionHelper as NoOpConnectionHelper } from './NoOpConnectionHelper';
 
 const connectionModuleMap = [
   {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: ['java'],
     module: MQConnectionHelper
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: ['java'],
     module: KafkaConnectionHelper
+  },
+  {
+    protocols: ['ibmmq', 'ibmmq-secure', 'kafka', 'kafka-secure'],
+    libraries: ['spring'],
+    module: NoOpConnectionHelper
   }
 ];
 
 export default function({ asyncapi, params }) {
   const server = asyncapi.allServers().get(params.server);
   const protocol = server.protocol();
-  const foundModule = connectionModuleMap.find(item => item.protocols.includes(protocol));
+  const foundModule = connectionModuleMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(params.library));
   if (!foundModule) {
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${params.library}`);
   }
   return foundModule.module({ asyncapi, params });
 }
diff --git a/components/Consumer/SpringKafkaConsumer.js b/components/Consumer/SpringKafkaConsumer.js
new file mode 100644
index 0000000..9c68359
--- /dev/null
+++ b/components/Consumer/SpringKafkaConsumer.js
@@ -0,0 +1,58 @@
+/*
+* (c) Copyright IBM Corporation 2021
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+import { toJavaClassName } from '../../utils/String.utils';
+
+export function ConsumerDeclaration() {
+  return `
+  private final String channelName;
+  `;
+}
+
+export function ConsumerImports({ params, message }) {
+  const id = message.id() || message.name();
+  return `
+import ${params.package}.PubSubBase;
+import ${params.package}.models.ModelContract;
+import ${params.package}.models.${toJavaClassName(id)};
+
+import org.springframework.kafka.annotation.KafkaListener;
+`;
+}
+
+export function ReceiveMessage({ message }) {
+  const id = message.id() || message.name();
+  let groupId = '';
+  if (message.groupId) {
+    groupId = `, groupId = "${message.groupId}"`;
+  }
+  return `
+  @KafkaListener(topics = channelName ${groupId})
+  public void receive(${toJavaClassName(id)} ${id}) {
+    logger.info("Received message type: " + ${id}.getClass().getName());
+  }
+  `;
+}
+
+export function ConsumerConstructor({ name }) {
+  return `
+    super();
+    this.channelName = "${name}";
+  `;
+}
+
+export function ConsumerClose() {
+  return '';
+}
diff --git a/components/Consumer/index.js b/components/Consumer/index.js
index da8a01b..a453baa 100644
--- a/components/Consumer/index.js
+++ b/components/Consumer/index.js
@@ -1,23 +1,31 @@
 import * as MQConsumer from './MQConsumer';
 import * as KafkaConsumer from './KafkaConsumer';
+import * as SpringKafkaConsumer from './SpringKafkaConsumer';
 
 const consumerModuleMap = [
   {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: ['java'],
     module: MQConsumer
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: ['java'],
     module: KafkaConsumer
+  },
+  {
+    protocols: ['kafka', 'kafka-secure'],
+    libraries: ['spring'],
+    module: SpringKafkaConsumer
   }
 ];
 
 function getModule({ asyncapi, params }) {
   const server = asyncapi.allServers().get(params.server);
   const protocol = server.protocol();
-  const foundModule = consumerModuleMap.find(item => item.protocols.includes(protocol));
+  const foundModule = consumerModuleMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(params.library));
   if (!foundModule) {
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${params.library}`);
   }
   return foundModule.module;
 }
diff --git a/components/Demo/Demo.js b/components/Demo/Demo.js
index 47d88f3..70f6bef 100644
--- a/components/Demo/Demo.js
+++ b/components/Demo/Demo.js
@@ -23,6 +23,9 @@ import { getMessagePayload } from '../../utils/Models.utils';
 import { PackageDeclaration } from '../Common';
 
 export function Demo(asyncapi, params) {
+  if (params.library === 'spring') {
+    return null;
+  }
   const foundPubAndSub = asyncapi.allChannels().filterBy((chan) => {
     return chan.operations().filterBySend().length > 0 &&
       chan.operations().filterByReceive().length > 0;
diff --git a/components/Files/Consumers.js b/components/Files/Consumers.js
index df76033..ea47abb 100644
--- a/components/Files/Consumers.js
+++ b/components/Files/Consumers.js
@@ -41,7 +41,7 @@ export function Consumers(asyncapi, channels, params) {
-
+
diff --git a/components/Files/Producers.js b/components/Files/Producers.js
index 3545ace..e14ff78 100644
--- a/components/Files/Producers.js
+++ b/components/Files/Producers.js
@@ -25,6 +25,7 @@ export function Producers(asyncapi, channels, params) {
     const name = channel.id();
     const className = `${toJavaClassName(name)}Producer`;
     const packagePath = javaPackageToPath(params.package);
+    const classToExtend = params.library === 'spring' ? undefined : 'PubSubBase';
 
     return (
@@ -33,7 +34,7 @@ export function Producers(asyncapi, channels, params) {
-
+
diff --git a/components/PomHelper.js b/components/PomHelper.js
index 4508f8c..d4765d4 100644
--- a/components/PomHelper.js
+++ b/components/PomHelper.js
@@ -21,7 +21,8 @@ import { render } from '@asyncapi/generator-react-sdk';
 export function PomHelper({ server, params }) {
   // Resolve additional dependencies depending on protocol supplied
   const supportedProtocol = server.protocol();
-  const dependencies = resolveDependencies(supportedProtocol);
+  const library = params.library;
+  const dependencies = resolveDependencies(supportedProtocol, library);
 
   let protocolDependencies = '';
 
diff --git a/components/Producer/SpringKafkaProducer.js b/components/Producer/SpringKafkaProducer.js
new file mode 100644
index 0000000..8cc5267
--- /dev/null
+++ b/components/Producer/SpringKafkaProducer.js
@@ -0,0 +1,85 @@
+/*
+* (c) Copyright IBM Corporation 2021
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+export function ProducerDeclaration() {
+  return `
+  private SpringKafkaProducer producer = null;
+  `;
+}
+
+export function SendMessage() {
+  return `
+  public void send(ModelContract modelContract) {
+    Serializable modelInstance = (Serializable) modelContract;
+
+    try{
+      // JSON encode and transmit
+      ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+      String json = ow.writeValueAsString(modelInstance);
+
+      logger.info("Sending Message: " + json);
+
+      ProducerRecord record = new ProducerRecord(topicName, json);
+      producer.send(record);
+
+    }catch (Exception e){
+      logger.severe("An error occurred whilst attempting to send a message: " + e);
+    }
+  }`;
+}
+
+export function ProducerConstructor({ name }) {
+  return `
+    super();
+    String id = "my-publisher";
+
+    logger.info("Pub application is starting");
+
+    // prepare connection for producer
+    createConnection("${name}", id);
+
+    producer = ch.createProducer();
+`;
+}
+
+export function ProducerClose() {
+  return `
+  public void close() {
+    producer.close();
+  }
+  `;
+}
+
+export function ProducerImports({ params }) {
+  return `
+import java.util.logging.*;
+import java.io.Serializable;
+import java.util.UUID;
+
+import ${params.package}.ConnectionHelper;
+import ${params.package}.LoggingHelper;
+import ${params.package}.Connection;
+import ${params.package}.PubSubBase;
+import ${params.package}.models.ModelContract;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.annotation.JsonView;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+  `;
+}
diff --git a/components/Producer/index.js b/components/Producer/index.js
index 87d68a3..cd60928 100644
--- a/components/Producer/index.js
+++ b/components/Producer/index.js
@@ -1,23 +1,31 @@
 import * as MQProducer from './MQProducer';
 import * as KafkaProducer from './KafkaProducer';
+import * as SpringKafkaProducer from './SpringKafkaProducer';
 
 const producerModuleMap = [
   {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: ['java'],
     module: MQProducer
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: ['java'],
     module: KafkaProducer
+  },
+  {
+    protocols: ['kafka', 'kafka-secure'],
+    libraries: ['spring'],
+    module: SpringKafkaProducer
   }
 ];
 
 function getModule({ asyncapi, params }) {
   const server = asyncapi.allServers().get(params.server);
   const protocol = server.protocol();
-  const foundModule = producerModuleMap.find(item => item.protocols.includes(protocol));
+  const foundModule = producerModuleMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(params.library));
   if (!foundModule) {
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${params.library}`);
   }
   return foundModule.module;
 }
diff --git a/components/PubSubBase/NoOpPubSubBase.js b/components/PubSubBase/NoOpPubSubBase.js
new file mode 100644
index 0000000..2086499
--- /dev/null
+++ b/components/PubSubBase/NoOpPubSubBase.js
@@ -0,0 +1,19 @@
+/*
+* (c) Copyright IBM Corporation 2021
+*
+* Licensed under the Apache License, Version 2.0 (the "License");
+* you may not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+export function PubSubBase(params) {
+  return null;
+}
diff --git a/components/PubSubBase/index.js b/components/PubSubBase/index.js
index 69ffa99..a7aacb7 100644
--- a/components/PubSubBase/index.js
+++ b/components/PubSubBase/index.js
@@ -1,23 +1,31 @@
 import * as MQPubSubBase from './MQPubSubBase';
 import * as KafkaPubSubBase from './KafkaPubSubBase';
+import * as NoOpPubSubBase from './NoOpPubSubBase';
 
 const pubSubModuleMap = [
   {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: ['java'],
     module: MQPubSubBase
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: ['java'],
     module: KafkaPubSubBase
+  },
+  {
+    protocols: ['ibmmq', 'ibmmq-secure', 'kafka', 'kafka-secure'],
+    libraries: ['spring'],
+    module: NoOpPubSubBase
   }
 ];
 
 function getModule({ asyncapi, params }) {
   const server = asyncapi.allServers().get(params.server);
   const protocol = server.protocol();
-  const foundModule = pubSubModuleMap.find(item => item.protocols.includes(protocol));
+  const foundModule = pubSubModuleMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(params.library));
   if (!foundModule) {
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${params.library}`);
   }
   return foundModule.module;
 }
diff --git a/components/Readme/index.js b/components/Readme/index.js
index 7033717..30c8630 100644
--- a/components/Readme/index.js
+++ b/components/Readme/index.js
@@ -4,10 +4,12 @@ import { Readme as MQReadme } from './MQReadme';
 const readmeModuleMap = [
   {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: ['java', 'spring'],
     module: MQReadme
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: ['java', 'spring'],
     module: KafkaReadme
   }
 ];
@@ -15,9 +17,9 @@ const readmeModuleMap = [
 export default function({ asyncapi, params }) {
   const server = asyncapi.allServers().get(params.server);
   const protocol = server.protocol();
-  const foundModule = readmeModuleMap.find(item => item.protocols.includes(protocol));
+  const foundModule = readmeModuleMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(params.library));
   if (!foundModule) {
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${params.library}`);
   }
   return foundModule.module({ asyncapi, params });
 }
diff --git a/package.json b/package.json
index 953baef..f8264d3 100644
--- a/package.json
+++ b/package.json
@@ -65,6 +65,11 @@
       "description": "The server you want to use in the code.",
       "required": true
     },
+    "library": {
+      "description": "The library to use.",
+      "required": false,
+      "default": "java"
+    },
     "asyncapiFileDir": {
       "description": "Custom location of the AsyncAPI file that you provided as an input in generation. By default it is located in the root of the output directory"
     },
diff --git a/template/index.js b/template/index.js
index cfb65bf..69f1543 100644
--- a/template/index.js
+++ b/template/index.js
@@ -59,6 +59,9 @@ export default function({ asyncapi, params }) {
 }
 
 function LoggingHelperRenderer(params) {
+  if (params.library === 'spring') {
+    return null;
+  }
   const filePath = `${javaPackageToPath(params.package)}LoggingHelper.java`;
   return (
@@ -80,6 +83,9 @@ function ModelContractRenderer(params) {
 }
 
 function ConnectionRender(asyncapi, params) {
+  if (params.library === 'spring') {
+    return null;
+  }
   const filePath = `${javaPackageToPath(params.package)}Connection.java`;
   return (
@@ -90,6 +96,9 @@ function ConnectionHelperRenderer(asyncapi, params) {
+  if (params.library === 'spring') {
+    return null;
+  }
   const filePath = `${javaPackageToPath(params.package)}ConnectionHelper.java`;
   return (
@@ -100,6 +109,9 @@ function EnvJsonRenderer(asyncapi, params) {
+  if (params.library === 'spring') {
+    return null;
+  }
   return (
diff --git a/test/SpringKafka.test.js b/test/SpringKafka.test.js
new file mode 100644
index 0000000..6ddfef7
--- /dev/null
+++ b/test/SpringKafka.test.js
@@ -0,0 +1,75 @@
+const path = require('path');
+const Generator = require('@asyncapi/generator');
+const { existsSync, readFileSync } = require('fs');
+const crypto = require('crypto');
+
+const MAIN_TEST_RESULT_PATH = path.join('test', 'temp', 'integrationTestResult');
+
+/*
+ * Please note: This test file was adapted from
+ * https://github.com/asyncapi/java-spring-cloud-stream-template/blob/master/test/integration.test.js
+ */
+
+describe('template spring kafka tests using the generator', () => {
+  const generateFolderName = () => {
+    // You always want to generate to new directory to make sure test runs in clear environment
+    return path.resolve(MAIN_TEST_RESULT_PATH, crypto.randomBytes(4).toString('hex'));
+  };
+
+  jest.setTimeout(30000);
+
+  it('should generate dynamic files for spring kafka', async () => {
+    jest.setTimeout(30000);
+
+    const OUTPUT_DIR = generateFolderName();
+    const PACKAGE = 'com.asyncapi';
+    const PACKAGE_PATH = path.join(...PACKAGE.split('.'));
+    const params = {
+      server: 'production',
+      library: 'spring',
+    };
+
+    const generator = new Generator(path.normalize('./'), OUTPUT_DIR, { forceWrite: true, templateParams: params });
+
+    const inputAsyncApiSchema = 'kafka-example';
+    await generator.generateFromFile(path.resolve('test', `mocks/${inputAsyncApiSchema}.yml`));
+
+    const channelName = 'SongReleased';
+
+    const expectedFiles = [
+      'pom.xml',
+      `${PACKAGE_PATH}/${channelName}Producer.java`,
+      `${PACKAGE_PATH}/${channelName}Subscriber.java`,
+      `${PACKAGE_PATH}/models/ModelContract.java`,
+      `${PACKAGE_PATH}/models/Song.java`,
+    ];
+
+    const notExpectedFiles = [
+      `${PACKAGE_PATH}/Connection.java`,
+      `${PACKAGE_PATH}/ConnectionHelper.java`,
+      `${PACKAGE_PATH}/DemoProducer.java`,
+      `${PACKAGE_PATH}/DemoSubscriber.java`,
+      'env.json',
+    ];
+
+    for (const index in expectedFiles) {
+      expect(existsSync(path.join(OUTPUT_DIR, expectedFiles[index]))).toBe(true);
+    }
+
+    for (const index in notExpectedFiles) {
+      expect(existsSync(path.join(OUTPUT_DIR, notExpectedFiles[index]))).toBe(false);
+    }
+
+    const expectedFileContents = [
+      `${channelName}Producer.java`,
+      `${channelName}Subscriber.java`,
+    ];
+
+    const EXPECTED_DIR = 'test/outputs/spring';
+    for (const index in expectedFileContents) {
+      const actual = readFileSync(`${OUTPUT_DIR}/${PACKAGE_PATH}/${expectedFileContents[index]}`, { encoding: 'utf8' });
+      const expected = readFileSync(`${EXPECTED_DIR}/${inputAsyncApiSchema}/${expectedFileContents[index]}`, { encoding: 'utf8' });
+      expect(actual).toContain(expected);
+    }
+  });
+});
diff --git a/test/outputs/spring/kafka-example/SongReleasedProducer.java b/test/outputs/spring/kafka-example/SongReleasedProducer.java
new file mode 100644
index 0000000..596eb54
--- /dev/null
+++ b/test/outputs/spring/kafka-example/SongReleasedProducer.java
@@ -0,0 +1,59 @@
+package com.asyncapi;
+
+import java.util.logging.*;
+import java.io.Serializable;
+import java.util.UUID;
+
+import com.asyncapi.ConnectionHelper;
+import com.asyncapi.LoggingHelper;
+import com.asyncapi.Connection;
+import com.asyncapi.PubSubBase;
+import com.asyncapi.models.ModelContract;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
+import com.fasterxml.jackson.annotation.JsonView;
+
+import org.apache.kafka.clients.producer.KafkaProducer;
+import org.apache.kafka.clients.producer.ProducerRecord;
+
+import com.asyncapi.models.Song;
+public class SongReleasedProducer {
+
+  private SpringKafkaProducer producer = null;
+
+  public SongReleasedProducer() {
+
+    super();
+    String id = "my-publisher";
+
+    logger.info("Pub application is starting");
+
+    // prepare connection for producer
+    createConnection("song.released", id);
+
+    producer = ch.createProducer();
+
+  }
+  public void send(ModelContract modelContract) {
+    Serializable modelInstance = (Serializable) modelContract;
+
+    try{
+      // JSON encode and transmit
+      ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
+      String json = ow.writeValueAsString(modelInstance);
+
+      logger.info("Sending Message: " + json);
+
+      ProducerRecord record = new ProducerRecord(topicName, json);
+      producer.send(record);
+
+    }catch (Exception e){
+      logger.severe("An error occurred whilst attempting to send a message: " + e);
+    }
+  }
+  public void close() {
+    producer.close();
+  }
+
+}
\ No newline at end of file
diff --git a/test/outputs/spring/kafka-example/SongReleasedSubscriber.java b/test/outputs/spring/kafka-example/SongReleasedSubscriber.java
new file mode 100644
index 0000000..38411f3
--- /dev/null
+++ b/test/outputs/spring/kafka-example/SongReleasedSubscriber.java
@@ -0,0 +1,24 @@
+package com.asyncapi;
+
+import com.asyncapi.PubSubBase;
+import com.asyncapi.models.ModelContract;
+import com.asyncapi.models.Song;
+
+import org.springframework.kafka.annotation.KafkaListener;
+
+public class SongReleasedSubscriber extends PubSubBase{
+
+  private final String channelName;
+
+  public SongReleasedSubscriber() {
+
+    super();
+    this.channelName = "song.released";
+
+  }
+  @KafkaListener(topics = channelName )
+  public void receive(Song song) {
+    logger.info("Received message type: " + song.getClass().getName());
+  }
+
+}
\ No newline at end of file
diff --git a/utils/DependencyResolver.utils.js b/utils/DependencyResolver.utils.js
index ded47aa..0a2e080 100644
--- a/utils/DependencyResolver.utils.js
+++ b/utils/DependencyResolver.utils.js
@@ -1,24 +1,40 @@
 const dependencyMap = [
-  {
+  {
     protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: 'java',
    dependencies: [
       { groupId: 'com.ibm.mq', artifactId: 'com.ibm.mq.allclient', version: '9.2.3.0' }
     ]
   },
   {
     protocols: ['kafka', 'kafka-secure'],
+    libraries: 'java',
     dependencies: [
       { groupId: 'org.apache.kafka', artifactId: 'kafka-clients', version: '2.8.0' }
     ]
-  }
+  },
+  {
+    protocols: ['ibmmq', 'ibmmq-secure'],
+    libraries: 'spring',
+    dependencies: [
+      { groupId: 'com.ibm.mq', artifactId: 'mq-jms-spring-boot-starter', version: '3.3.1' }
+    ]
+  },
+  {
+    protocols: ['kafka', 'kafka-secure'],
+    libraries: 'spring',
+    dependencies: [
+      { groupId: 'org.springframework.kafka', artifactId: 'spring-kafka', version: '3.2.4' }
+    ]
+  },
 ];
 
-export function resolveDependencies(protocol) {
-  const foundMapping = dependencyMap.find(item => item.protocols.includes(protocol));
+export function resolveDependencies(protocol, library) {
+  const foundMapping = dependencyMap.find(item => item.protocols.includes(protocol) && item.libraries.includes(library));
   if (!foundMapping) {
     // This will never throw if the protocols in package.json match the dependency map above
-    throw new Error(`This template does not currently support the protocol ${protocol}`);
+    throw new Error(`This template does not currently support the protocol ${protocol} and library ${library}`);
   }
   return foundMapping.dependencies;
diff --git a/utils/Models.utils.js b/utils/Models.utils.js
index d2c7f5c..55249ea 100644
--- a/utils/Models.utils.js
+++ b/utils/Models.utils.js
@@ -16,7 +16,6 @@ export function collateModels(asyncapi) {
   return models;
 }
 
-
 // The rest of the generator depends on a message object
 // having a payload with properties. This is needed to
 // be able to generate Java classes with attributes
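
For reference, a generation run that exercises the new `library` parameter documented in the README hunk above might look like the following, assuming the classic Generator CLI (`ag`) and an AsyncAPI document whose `servers` section defines a `production` server; adjust the template name or path to match your installation:

    ag asyncapi.yml @asyncapi/java-template -o output -p server=production -p library=spring

With the default `library=java` the existing MQ/Kafka code paths are unchanged; with `library=spring` the Connection, ConnectionHelper and PubSubBase components resolve to their No-Op variants and the Spring Kafka producer/consumer templates above are selected instead.

The sketch below is illustrative only and is not what the template currently emits: it shows the idiomatic Spring Kafka shape (a constructor-injected `KafkaTemplate` plus `@KafkaListener`, whose `topics` attribute must be a constant or a property placeholder rather than an instance field) that the generated `SongReleasedProducer`/`SongReleasedSubscriber` pair could converge towards. The class name `SongReleasedClient` is hypothetical, and record (de)serialization configuration is omitted.

    // Hypothetical, hand-written sketch -- not generated by this template.
    package com.asyncapi;

    import org.springframework.kafka.annotation.KafkaListener;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.stereotype.Component;

    import com.asyncapi.models.Song;

    @Component
    public class SongReleasedClient {

        private final KafkaTemplate<String, Song> kafkaTemplate;

        public SongReleasedClient(KafkaTemplate<String, Song> kafkaTemplate) {
            this.kafkaTemplate = kafkaTemplate;
        }

        // Publish a Song payload to the channel backing this client.
        public void send(Song song) {
            kafkaTemplate.send("song.released", song);
        }

        // The topic must be a compile-time constant or a ${...} placeholder.
        @KafkaListener(topics = "song.released", groupId = "song-released-consumer")
        public void receive(Song song) {
            // handle the received Song
        }
    }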