Skip to content
This repository was archived by the owner on Apr 22, 2024. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
120 changes: 119 additions & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<cdap.version>6.1.0-SNAPSHOT</cdap.version>
<hydrator.version>2.3.0-SNAPSHOT</hydrator.version>
<guava.version>19.0</guava.version>
<guava.version>27.0.1-jre</guava.version>
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why is guava version bumped?

<testSourceLocation>${project.basedir}/src/test/java/</testSourceLocation>
</properties>

<repositories>
Expand Down Expand Up @@ -83,6 +84,7 @@
</dependencies>

<build>
<testSourceDirectory>${testSourceLocation}</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
Expand Down Expand Up @@ -188,7 +190,123 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>

<profiles>

<profile>
<!-- e2e-tests profile: switches the test source root to src/e2e-test/java
     via the testSourceLocation property (consumed by testSourceDirectory in
     the main build section), skips regular unit tests, and runs Cucumber
     TestRunner classes through failsafe, then renders HTML reports. -->
<id>e2e-tests</id>
<properties>
<!-- Overrides the default testSourceLocation so only e2e sources are compiled/run. -->
<testSourceLocation>src/e2e-test/java</testSourceLocation>
</properties>
<build>
<testResources>
<testResource>
<!-- Feature files and e2e configuration resources. -->
<directory>src/e2e-test/resources</directory>
</testResource>
</testResources>
<plugins>
<plugin>
<!-- Unit tests are skipped entirely under this profile; only failsafe runs. -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>

<plugin>
<!-- Runs the Cucumber TestRunner classes during the integration-test phase. -->
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>3.0.0-M5</version>
<configuration>
<includes>
<include>TestRunner.java</include>
</includes>
<!--Start configuration to run TestRunners in parallel-->
<parallel>classes</parallel> <!--Running TestRunner classes in parallel-->
<threadCount>2</threadCount> <!--Number of classes to run in parallel-->
<forkCount>2</forkCount> <!--Number of JVM processes -->
<reuseForks>true</reuseForks>
<!--End configuration to run TestRunners in parallel-->
<!-- GCP credentials/identity are forwarded from the invoking environment.
     NOTE(review): assumes these variables are exported by the caller (CI);
     unset values pass through as empty strings. TODO confirm in CI config. -->
<environmentVariables>
<GOOGLE_APPLICATION_CREDENTIALS>
${GOOGLE_APPLICATION_CREDENTIALS}
</GOOGLE_APPLICATION_CREDENTIALS>
<SERVICE_ACCOUNT_TYPE>
${SERVICE_ACCOUNT_TYPE}
</SERVICE_ACCOUNT_TYPE>
<SERVICE_ACCOUNT_FILE_PATH>
${SERVICE_ACCOUNT_FILE_PATH}
</SERVICE_ACCOUNT_FILE_PATH>
<SERVICE_ACCOUNT_JSON>
${SERVICE_ACCOUNT_JSON}
</SERVICE_ACCOUNT_JSON>
</environmentVariables>
</configuration>
<executions>
<execution>
<goals>
<!-- NOTE(review): only integration-test is bound; without the verify goal,
     failsafe test failures will not fail the build by themselves. The
     cucumber-reporting plugin below (checkBuildResult=true, verify phase)
     presumably compensates. Confirm this is intentional. -->
<goal>integration-test</goal>
</goals>
</execution>
</executions>
</plugin>

<plugin>
<!-- Aggregates Cucumber JSON output into HTML reports at the verify phase. -->
<groupId>net.masterthought</groupId>
<artifactId>maven-cucumber-reporting</artifactId>
<version>5.5.0</version>

<executions>
<execution>
<id>execution</id>
<phase>verify</phase>
<goals>
<goal>generate</goal>
</goals>
<configuration>
<projectName>Cucumber Reports</projectName> <!-- Replace with project name -->
<outputDirectory>target/cucumber-reports/advanced-reports</outputDirectory>
<buildNumber>1</buildNumber>
<skip>false</skip>
<inputDirectory>${project.build.directory}/cucumber-reports</inputDirectory>
<jsonFiles> <!-- supports wildcard or name pattern -->
<param>**/*.json</param>
</jsonFiles> <!-- optional, defaults to outputDirectory if not specified -->
<classificationDirectory>${project.build.directory}/cucumber-reports</classificationDirectory>
<!-- Fails the build when scenarios failed, per plugin documentation. -->
<checkBuildResult>true</checkBuildResult>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>

<dependencies>
<!-- CDAP e2e test framework supplying the step definitions used by the features. -->
<dependency>
<groupId>io.cdap.tests.e2e</groupId>
<artifactId>cdap-e2e-framework</artifactId>
<version>0.0.1-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<!-- NOTE(review): runtime scope inside a test-only profile looks unusual;
     presumably intended as the SLF4J backend at test runtime. Verify scope. -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.8</version>
<scope>runtime</scope>
</dependency>
</dependencies>
</profile>

</profiles>
</project>
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
# Negative-path coverage for the DateTransform plugin: each scenario wires a
# BigQuery source into DateTransform, then asserts a specific validation error
# on the DateTransform properties page.
# The @BQ_SOURCE_DATETRANSFORM_TEST tag presumably provisions the BigQuery
# source test table via a framework hook - TODO confirm against the hook class.
@DateTransform
Feature:DateTransform - Verify DateTransform Plugin Error scenarios

# Validating with no properties set must flag sourceField and targetField
# as mandatory.
@BQ_SOURCE_DATETRANSFORM_TEST
Scenario:Verify DateTransform plugin validation errors for mandatory fields
Given Open Datafusion Project to configure pipeline
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Transform"
When Select plugin: "Date Transform" from the plugins list as: "Transform"
Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "projectId" with value: "projectId"
Then Enter input plugin property: "datasetProjectId" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Capture the generated Output Schema
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "DateTransform"
Then Click on the Validate button
Then Verify mandatory property error for below listed properties:
| sourceField |
| targetField |

# A source field name not present in the input schema must surface the
# errorMessageDateTransformInvalidSourceFieldName header error.
# Quoted values like "dateTransform.IncorrectFieldName" are property-file keys
# resolved by the framework, not literal field names.
@BQ_SOURCE_DATETRANSFORM_TEST
Scenario:Verify DateTransform plugin error for invalid Source Field Name
Given Open Datafusion Project to configure pipeline
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Transform"
When Select plugin: "Date Transform" from the plugins list as: "Transform"
Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "projectId" with value: "projectId"
Then Enter input plugin property: "datasetProjectId" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Capture the generated Output Schema
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "DateTransform"
Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.IncorrectFieldName"
Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
Then Click on the Validate button
Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformInvalidSourceFieldName" on the header

# Mirror of the previous scenario for an invalid target field name.
@BQ_SOURCE_DATETRANSFORM_TEST
Scenario:Verify DateTransform plugin error for invalid Target Field Name
Given Open Datafusion Project to configure pipeline
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Transform"
When Select plugin: "Date Transform" from the plugins list as: "Transform"
Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "projectId" with value: "projectId"
Then Enter input plugin property: "datasetProjectId" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Capture the generated Output Schema
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "DateTransform"
Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldName"
Then Enter input plugin property: "targetFieldName" with value: "dateTransform.IncorrectFieldName"
Then Click on the Validate button
Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformInvalidTargetFieldName" on the header

# Source/target field lists of different lengths must be rejected; the
# "...SourceFieldNames" (plural) key presumably resolves to a multi-field list.
@BQ_SOURCE_DATETRANSFORM_TEST
Scenario:Verify DateTransform plugin error for Source and Target field must have same number of fields
Given Open Datafusion Project to configure pipeline
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Transform"
When Select plugin: "Date Transform" from the plugins list as: "Transform"
Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "projectId" with value: "projectId"
Then Enter input plugin property: "datasetProjectId" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Capture the generated Output Schema
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "DateTransform"
Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldNames"
Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
Then Click on the Validate button
Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformMustHaveSameNumberOfFields" on the header

# DateTransform placed with no upstream source: validation must report the
# missing input schema. Note: this scenario does not use the BigQuery fixture
# steps even though it carries the same tag.
@BQ_SOURCE_DATETRANSFORM_TEST
Scenario:Verify DateTransform plugin error for No Input Schema available
Given Open Datafusion Project to configure pipeline
When Expand Plugin group in the LHS plugins list: "Transform"
When Select plugin: "Date Transform" from the plugins list as: "Transform"
Then Navigate to the properties page of plugin: "DateTransform"
Then Enter input plugin property: "sourceFieldName" with value: "dateTransform.SourceFieldName"
Then Enter input plugin property: "targetFieldName" with value: "dateTransform.TargetFieldName"
Then Click on the Validate button
Then Verify that the Plugin is displaying an error message: "errorMessageDateTransformForInputSchema" on the header
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
# Happy-path macro coverage: all four DateTransform properties are set as
# macros, supplied as runtime arguments, and the pipeline is verified both in
# preview and in a deployed run, ending with a BigQuery data/format check.
@DateTransform
Feature:DateTransform - Verification of DateTransform pipeline with BigQuery as source and target using macros

# @BQ_SINK_TEST / @BQ_SOURCE_DATETRANSFORM_TEST presumably provision the
# target and source BigQuery tables via framework hooks - TODO confirm.
# @PLUGIN-1224 appears to link this scenario to a tracked issue.
@BQ_SINK_TEST @BQ_SOURCE_DATETRANSFORM_TEST @PLUGIN-1224
Scenario: To verify data is getting transferred from BigQuery to BigQuery successfully with DateTransform plugin properties as macro arguments
Given Open Datafusion Project to configure pipeline
When Select plugin: "BigQuery" from the plugins list as: "Source"
When Expand Plugin group in the LHS plugins list: "Transform"
When Select plugin: "Date Transform" from the plugins list as: "Transform"
Then Connect plugins: "BigQuery" and "DateTransform" to establish connection
Then Navigate to the properties page of plugin: "BigQuery"
Then Replace input plugin property: "projectId" with value: "projectId"
Then Enter input plugin property: "datasetProjectId" with value: "projectId"
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqSourceTable"
Then Click on the Get Schema button
Then Capture the generated Output Schema
Then Validate "BigQuery" plugin properties
Then Close the Plugin Properties page
Then Navigate to the properties page of plugin: "DateTransform"
# All four plugin properties are macro-enabled; the quoted values become the
# runtime-argument keys used later in this scenario.
Then Click on the Macro button of Property: "SourceFieldName" and set the value to: "dateTransform.SourceFieldName"
Then Click on the Macro button of Property: "SourceFieldDateFormat" and set the value to: "dateTransform.SourceFieldDateFormat"
Then Click on the Macro button of Property: "TargetFieldName" and set the value to: "dateTransform.TargetFieldName"
Then Click on the Macro button of Property: "TargetFieldDateFormat" and set the value to: "dateTransform.TargetFieldDateFormat"
Then Validate "Date Transform" plugin properties
Then Close the Plugin Properties page
When Expand Plugin group in the LHS plugins list: "Sink"
When Select plugin: "BigQuery" from the plugins list as: "Sink"
Then Connect plugins: "DateTransform" and "BigQuery2" to establish connection
Then Navigate to the properties page of plugin: "BigQuery2"
Then Replace input plugin property: "projectId" with value: "projectId"
Then Enter input plugin property: "datasetProjectId" with value: "projectId"
# NOTE(review): the sink reuses the same "BQReferenceName" key as the source;
# presumably the resolved values differ. Verify in the property files.
Then Enter input plugin property: "referenceName" with value: "BQReferenceName"
Then Enter input plugin property: "dataset" with value: "dataset"
Then Enter input plugin property: "table" with value: "bqTargetTable"
Then Validate "BigQuery2" plugin properties
Then Close the Plugin Properties page
Then Save the pipeline
# First pass: preview run with the macro values supplied as runtime arguments.
Then Preview and run the pipeline
Then Enter runtime argument value "SourceFieldName" for key "dateTransform.SourceFieldName"
Then Enter runtime argument value "SourceFieldDateFormat" for key "dateTransform.SourceFieldDateFormat"
Then Enter runtime argument value "TargetFieldName" for key "dateTransform.TargetFieldName"
Then Enter runtime argument value "TargetFieldDateFormat" for key "dateTransform.TargetFieldDateFormat"
Then Run the preview of pipeline with runtime arguments
Then Wait till pipeline preview is in running state
Then Open and capture pipeline preview logs
Then Verify the preview run status of pipeline in the logs is "succeeded"
Then Close the pipeline logs
# NOTE(review): sink node label "BigQueryTable" differs from "BigQuery2" used
# in the connect/properties steps above - confirm which label the step expects.
Then Click on the Preview Data link on the Sink plugin node: "BigQueryTable"
Then Verify sink plugin's Preview Data for Input Records table and the Input Schema matches the Output Schema of Source plugin
Then Close the preview data
# Second pass: deployed run with the same runtime arguments, then record-count
# and date-format verification against the target BigQuery table.
Then Deploy the pipeline
Then Run the Pipeline in Runtime
Then Enter runtime argument value "SourceFieldName" for key "dateTransform.SourceFieldName"
Then Enter runtime argument value "SourceFieldDateFormat" for key "dateTransform.SourceFieldDateFormat"
Then Enter runtime argument value "TargetFieldName" for key "dateTransform.TargetFieldName"
Then Enter runtime argument value "TargetFieldDateFormat" for key "dateTransform.TargetFieldDateFormat"
Then Run the Pipeline in Runtime with runtime arguments
Then Wait till pipeline is in running state
Then Open and capture logs
Then Verify the pipeline status is "Succeeded"
Then Close the pipeline logs
Then Validate OUT record count is equal to IN record count
Then Validate dateFormat "dateTransform.TargetFieldDateFormat" of the fields "dateTransform.TargetFieldName" in target BQ table "bqTargetTable"
Loading