Skip to content

Commit

Permalink
[TEST] - More load/save tests for steps
Browse files Browse the repository at this point in the history
  • Loading branch information
mbatchelor committed May 26, 2016
1 parent be4751e commit 0b99fe1
Show file tree
Hide file tree
Showing 9 changed files with 358 additions and 20 deletions.
Expand Up @@ -2,7 +2,7 @@
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
Expand Down Expand Up @@ -244,10 +244,12 @@ public void setIncludeFilename( boolean includeFilename ) {
this.includeFilename = includeFilename;
}

/**
 * Loads this step's configuration from the transformation XML.
 * Delegates entirely to readData( stepnode ); the databases and metaStore
 * parameters are not used by this step.
 *
 * @throws KettleXMLException if the XML node cannot be parsed
 */
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  readData( stepnode );
}

@Override
public Object clone() {
XBaseInputMeta retval = (XBaseInputMeta) super.clone();
return retval;
Expand All @@ -274,6 +276,7 @@ private void readData( Node stepnode ) throws KettleXMLException {
}
}

@Override
public void setDefault() {
dbfFileName = null;
rowLimit = 0;
Expand All @@ -288,6 +291,7 @@ public String getLookupStepname() {
return null;
}

/**
 * Resolves the step that provides filenames to this step: looks up
 * acceptingStepName in the supplied step list and caches the match in
 * acceptingStep (null if no step by that name exists).
 */
@Override
public void searchInfoAndTargetSteps( List<StepMeta> steps ) {
  acceptingStep = StepMeta.findStep( steps, acceptingStepName );
}
Expand Down Expand Up @@ -352,6 +356,7 @@ public void getFields( RowMetaInterface row, String name, RowMetaInterface[] inf
row.addRowMeta( getOutputFields( fileList, name ) );
}

@Override
public String getXML() {
StringBuilder retval = new StringBuilder();

Expand All @@ -366,12 +371,16 @@ public String getXML() {

retval.append( " " + XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) );
retval.append( " " + XMLHandler.addTagValue( "accept_field", acceptingField ) );
if ( ( acceptingStepName == null ) && ( acceptingStep != null ) ) {
acceptingStepName = acceptingStep.getName();
}
retval.append( " "
+ XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? acceptingStep.getName() : "" ) ) );
+ XMLHandler.addTagValue( "accept_stepname", acceptingStepName ) );

return retval.toString();
}

@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
try {
dbfFileName = rep.getStepAttributeString( id_step, "file_dbf" );
Expand All @@ -393,6 +402,7 @@ public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, Lis
}
}

@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
try {
rep.saveStepAttribute( id_transformation, id_step, "file_dbf", dbfFileName );
Expand All @@ -406,15 +416,19 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform

rep.saveStepAttribute( id_transformation, id_step, "accept_filenames", acceptingFilenames );
rep.saveStepAttribute( id_transformation, id_step, "accept_field", acceptingField );
rep.saveStepAttribute( id_transformation, id_step, "accept_stepname", ( acceptingStep != null
? acceptingStep.getName() : "" ) );
if ( ( acceptingStepName == null ) && ( acceptingStep != null ) ) {
acceptingStepName = acceptingStep.getName();
}
rep.saveStepAttribute( id_transformation, id_step, "accept_stepname", acceptingStepName );

} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString(
PKG, "XBaseInputMeta.Exception.UnableToSaveMetaDataToRepository" )
+ id_step, e );
}
}

@Override
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
Repository repository, IMetaStore metaStore ) {
Expand Down Expand Up @@ -474,11 +488,13 @@ public void check( List<CheckResultInterface> remarks, TransMeta transMeta, Step
}
}

/**
 * Factory method: creates the runtime XBaseInput step instance that
 * executes this metadata within the given transformation.
 */
@Override
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
  Trans trans ) {
  return new XBaseInput( stepMeta, stepDataInterface, cnr, tr, trans );
}

/**
 * Factory method: creates a fresh per-execution data object for the
 * XBaseInput step.
 */
@Override
public StepDataInterface getStepData() {
  return new XBaseInputData();
}
Expand All @@ -493,6 +509,7 @@ public FileInputList getTextFileList( VariableSpace space ) {
space, new String[] { dbfFileName }, new String[] { null }, new String[] { null }, new String[] { "N" } );
}

/**
 * Returns the external jar files this step depends on at runtime.
 */
@Override
public String[] getUsedLibraries() {
  return new String[] { "javadbf.jar" };
}
Expand Down Expand Up @@ -529,6 +546,7 @@ public void setCharactersetName( String charactersetName ) {
*
* @return the filename of the exported resource
*/
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
try {
Expand Down
@@ -0,0 +1,92 @@
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.tableoutput;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.initializer.InitializerInterface;
import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;

/**
 * Load/save round-trip test for {@link TableOutputMeta}: verifies that every
 * listed attribute survives XML and repository serialization.
 */
public class TableOutputMetaLoadSaveTest implements InitializerInterface<StepMetaInterface> {
  LoadSaveTester loadSaveTester;
  Class<TableOutputMeta> testMetaClass = TableOutputMeta.class;

  @Before
  public void setUpLoadSave() throws Exception {
    KettleEnvironment.init();
    PluginRegistry.init( true );
    // Every serialized attribute of TableOutputMeta that must round-trip.
    List<String> attributes =
      Arrays.asList( "databaseMeta", "schemaName", "tableName", "commitSize", "truncateTable", "ignoreErrors", "useBatchUpdate",
        "partitioningEnabled", "partitioningField", "partitioningDaily", "partitioningMonthly", "tableNameInField", "tableNameField",
        "tableNameInTable", "returningGeneratedKeys", "generatedKeyField", "specifyFields", "fieldStream", "fieldDatabase" );

    // Boolean getters on TableOutputMeta don't follow the is<Name>()/get<Name>()
    // convention, so map them explicitly. Use a plain HashMap with put() calls
    // instead of double-brace initialization, which would create an anonymous
    // HashMap subclass holding a hidden reference to this test instance.
    Map<String, String> getterMap = new HashMap<String, String>();
    getterMap.put( "truncateTable", "truncateTable" );
    getterMap.put( "ignoreErrors", "ignoreErrors" );
    getterMap.put( "useBatchUpdate", "useBatchUpdate" );
    getterMap.put( "specifyFields", "specifyFields" );
    Map<String, String> setterMap = new HashMap<String, String>();

    // fieldStream/fieldDatabase are parallel String arrays sized by allocate( 5 )
    // in modify(), so validate them with a 5-element array validator.
    FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
      new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );

    Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
    attrValidatorMap.put( "fieldStream", stringArrayLoadSaveValidator );
    attrValidatorMap.put( "fieldDatabase", stringArrayLoadSaveValidator );

    Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

    loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
        getterMap, setterMap, attrValidatorMap, typeValidatorMap, this );
  }

  /**
   * Called by LoadSaveTester before populating the meta: sizes the field
   * arrays so the array validators have somewhere to write.
   */
  @Override
  public void modify( StepMetaInterface someMeta ) {
    if ( someMeta instanceof TableOutputMeta ) {
      ( (TableOutputMeta) someMeta ).allocate( 5 );
    }
  }

  @Test
  public void testSerialization() throws KettleException {
    loadSaveTester.testSerialization();
  }

}
Expand Up @@ -100,13 +100,8 @@ public void setUp() throws Exception {
}

@Test
public void testLoadSaveXML() throws KettleException {
loadSaveTester.testXmlRoundTrip();
}

@Test
public void testLoadSaveRepo() throws KettleException {
loadSaveTester.testRepoRoundTrip();
public void testSerialization() throws KettleException {
loadSaveTester.testSerialization();
}


Expand Down
Expand Up @@ -2,7 +2,7 @@
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
Expand Down Expand Up @@ -76,7 +76,6 @@ public void testRoundTrip() throws KettleException {
new LoadSaveTester( UniqueRowsMeta.class, attributes, getterMap, setterMap,
fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() );

loadSaveTester.testRepoRoundTrip();
loadSaveTester.testXmlRoundTrip();
loadSaveTester.testSerialization();
}
}
Expand Up @@ -2,7 +2,7 @@
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
Expand Down Expand Up @@ -66,7 +66,6 @@ public void testRoundTrip() throws KettleException {
new LoadSaveTester( UniqueRowsByHashSetMeta.class, attributes, getterMap, setterMap,
fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() );

loadSaveTester.testRepoRoundTrip();
loadSaveTester.testXmlRoundTrip();
loadSaveTester.testSerialization();
}
}
Expand Up @@ -2,7 +2,7 @@
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2014 by Pentaho : http://www.pentaho.com
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
Expand Down Expand Up @@ -144,8 +144,7 @@ public void loadSaveRoundTripTest() throws KettleException {
new HashMap<String, String>(), new HashMap<String, FieldLoadSaveValidator<?>>(),
fieldLoadSaveValidatorTypeMap );

loadSaveTester.testRepoRoundTrip();
loadSaveTester.testXmlRoundTrip();
loadSaveTester.testSerialization();
}

private void assertContains( Map<String, Integer> map, String key, Integer value ) {
Expand Down
@@ -0,0 +1,101 @@
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.webservices;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.UUID;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.ListLoadSaveValidator;
import org.pentaho.di.trans.steps.webservices.wsdl.XsdType;

/**
 * Load/save round-trip test for {@link WebServiceMeta}: verifies that every
 * listed attribute, including the fieldsIn/fieldsOut lists of
 * {@link WebServiceField}, survives XML and repository serialization.
 */
public class WebServiceMetaLoadSaveTest {
  LoadSaveTester loadSaveTester;
  Class<WebServiceMeta> testMetaClass = WebServiceMeta.class;

  @Before
  public void setUpLoadSave() throws Exception {
    KettleEnvironment.init();
    PluginRegistry.init( true );
    // Every serialized attribute of WebServiceMeta that must round-trip.
    List<String> attributes =
      Arrays.asList( "url", "operationName", "operationRequestName", "operationNamespace", "inFieldContainerName",
        "inFieldArgumentName", "outFieldContainerName", "outFieldArgumentName", "proxyHost", "proxyPort", "httpLogin",
        "httpPassword", "passingInputData", "callStep", "compatible", "repeatingElementName", "returningReplyAsString",
        "fieldsIn", "fieldsOut" );

    Map<String, String> getterMap = new HashMap<String, String>();
    Map<String, String> setterMap = new HashMap<String, String>();

    // fieldsIn/fieldsOut are List<WebServiceField>; generate and compare
    // 5-element lists with the custom field validator below.
    Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
    attrValidatorMap.put( "fieldsIn",
      new ListLoadSaveValidator<WebServiceField>( new WebServiceFieldLoadSaveValidator(), 5 ) );
    attrValidatorMap.put( "fieldsOut",
      new ListLoadSaveValidator<WebServiceField>( new WebServiceFieldLoadSaveValidator(), 5 ) );

    Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

    loadSaveTester =
      new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
  }

  @Test
  public void testSerialization() throws KettleException {
    loadSaveTester.testSerialization();
  }

  /**
   * Generates random WebServiceField instances and compares them field by
   * field. Static nested class: it uses no state from the enclosing test, so
   * it should not carry a hidden reference to the outer instance.
   */
  public static class WebServiceFieldLoadSaveValidator implements FieldLoadSaveValidator<WebServiceField> {
    final Random rand = new Random();

    @Override
    public WebServiceField getTestObject() {
      WebServiceField rtn = new WebServiceField();
      rtn.setName( UUID.randomUUID().toString() );
      rtn.setWsName( UUID.randomUUID().toString() );
      rtn.setXsdType( XsdType.TYPES[ rand.nextInt( XsdType.TYPES.length )] );
      return rtn;
    }

    @Override
    public boolean validateTestObject( WebServiceField testObject, Object actual ) {
      if ( !( actual instanceof WebServiceField ) ) {
        return false;
      }
      WebServiceField another = (WebServiceField) actual;
      return new EqualsBuilder()
        .append( testObject.getName(), another.getName() )
        .append( testObject.getWsName(), another.getWsName() )
        .append( testObject.getXsdType(), another.getXsdType() )
        .isEquals();
    }
  }

}

0 comments on commit 0b99fe1

Please sign in to comment.