diff --git a/engine/src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMeta.java b/engine/src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMeta.java
index 699e4eda775a..3a4f0cb5e22e 100644
--- a/engine/src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMeta.java
+++ b/engine/src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMeta.java
@@ -2,7 +2,7 @@
  *
  * Pentaho Data Integration
  *
- * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
  *
  *******************************************************************************
  *
@@ -244,10 +244,12 @@ public void setIncludeFilename( boolean includeFilename ) {
     this.includeFilename = includeFilename;
   }
 
+  @Override
   public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
     readData( stepnode );
   }
 
+  @Override
   public Object clone() {
     XBaseInputMeta retval = (XBaseInputMeta) super.clone();
     return retval;
@@ -274,6 +276,7 @@ private void readData( Node stepnode ) throws KettleXMLException {
     }
   }
 
+  @Override
   public void setDefault() {
     dbfFileName = null;
     rowLimit = 0;
@@ -288,6 +291,7 @@ public String getLookupStepname() {
     return null;
   }
 
+  @Override
   public void searchInfoAndTargetSteps( List<StepMeta> steps ) {
     acceptingStep = StepMeta.findStep( steps, acceptingStepName );
   }
@@ -352,6 +356,7 @@ public void getFields( RowMetaInterface row, String name, RowMetaInterface[] inf
     row.addRowMeta( getOutputFields( fileList, name ) );
   }
 
+  @Override
   public String getXML() {
     StringBuilder retval = new StringBuilder();
 
@@ -366,12 +371,16 @@ public String getXML() {
 
     retval.append( "    " + XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) );
     retval.append( "    " + XMLHandler.addTagValue( "accept_field", acceptingField ) );
+    if ( ( acceptingStepName == null ) && ( acceptingStep != null ) ) {
+      acceptingStepName = acceptingStep.getName();
+    }
     retval.append( "    "
-      + XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? acceptingStep.getName() : "" ) ) );
+      + XMLHandler.addTagValue( "accept_stepname", acceptingStepName ) );
 
     return retval.toString();
   }
 
+  @Override
   public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
     try {
       dbfFileName = rep.getStepAttributeString( id_step, "file_dbf" );
@@ -393,6 +402,7 @@ public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, Lis
     }
   }
 
+  @Override
   public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
     try {
       rep.saveStepAttribute( id_transformation, id_step, "file_dbf", dbfFileName );
@@ -406,8 +416,11 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform
 
       rep.saveStepAttribute( id_transformation, id_step, "accept_filenames", acceptingFilenames );
       rep.saveStepAttribute( id_transformation, id_step, "accept_field", acceptingField );
-      rep.saveStepAttribute( id_transformation, id_step, "accept_stepname", ( acceptingStep != null
-        ? acceptingStep.getName() : "" ) );
+      if ( ( acceptingStepName == null ) && ( acceptingStep != null ) ) {
+        acceptingStepName = acceptingStep.getName();
+      }
+      rep.saveStepAttribute( id_transformation, id_step, "accept_stepname", acceptingStepName );
+
     } catch ( Exception e ) {
       throw new KettleException( BaseMessages.getString(
         PKG, "XBaseInputMeta.Exception.UnableToSaveMetaDataToRepository" )
@@ -415,6 +428,7 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform
     }
   }
 
+  @Override
   public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
     RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
     Repository repository, IMetaStore metaStore ) {
@@ -474,11 +488,13 @@ public void check( List<CheckResultInterface> remarks, TransMeta transMeta, Step
     }
   }
 
+  @Override
   public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
     Trans trans ) {
     return new XBaseInput( stepMeta, stepDataInterface, cnr, tr, trans );
   }
 
+  @Override
   public StepDataInterface getStepData() {
     return new XBaseInputData();
   }
@@ -493,6 +509,7 @@ public FileInputList getTextFileList( VariableSpace space ) {
       space, new String[] { dbfFileName }, new String[] { null }, new String[] { null }, new String[] { "N" } );
   }
 
+  @Override
   public String[] getUsedLibraries() {
     return new String[] { "javadbf.jar", };
   }
@@ -529,6 +546,7 @@ public void setCharactersetName( String charactersetName ) {
    *
    * @return the filename of the exported resource
    */
+  @Override
   public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
     ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
     try {
diff --git a/engine/test-src/org/pentaho/di/trans/steps/tableoutput/TableOutputMetaLoadSaveTest.java b/engine/test-src/org/pentaho/di/trans/steps/tableoutput/TableOutputMetaLoadSaveTest.java
new file mode 100644
index 000000000000..ced5858f8d5c
--- /dev/null
+++ b/engine/test-src/org/pentaho/di/trans/steps/tableoutput/TableOutputMetaLoadSaveTest.java
@@ -0,0 +1,92 @@
+/*! ******************************************************************************
+ *
+ * Pentaho Data Integration
+ *
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
+ *
+ *******************************************************************************
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+package org.pentaho.di.trans.steps.tableoutput;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.pentaho.di.core.KettleEnvironment;
+import org.pentaho.di.core.exception.KettleException;
+import org.pentaho.di.core.plugins.PluginRegistry;
+import org.pentaho.di.trans.step.StepMetaInterface;
+import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
+import org.pentaho.di.trans.steps.loadsave.initializer.InitializerInterface;
+import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
+import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
+import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;
+
+public class TableOutputMetaLoadSaveTest implements InitializerInterface<StepMetaInterface> {
+  LoadSaveTester loadSaveTester;
+  Class<TableOutputMeta> testMetaClass = TableOutputMeta.class;
+
+  @Before
+  public void setUpLoadSave() throws Exception {
+    KettleEnvironment.init();
+    PluginRegistry.init( true );
+    List<String> attributes =
+        Arrays.asList( "databaseMeta", "schemaName", "tableName", "commitSize", "truncateTable", "ignoreErrors", "useBatchUpdate",
+            "partitioningEnabled", "partitioningField", "partitioningDaily", "partitioningMonthly", "tableNameInField", "tableNameField",
+            "tableNameInTable", "returningGeneratedKeys", "generatedKeyField", "specifyFields", "fieldStream", "fieldDatabase" );
+
+    Map<String, String> getterMap = new HashMap<String, String>() {
+      {
+        put( "truncateTable", "truncateTable" );
+        put( "ignoreErrors", "ignoreErrors" );
+        put( "useBatchUpdate", "useBatchUpdate" );
+        put( "specifyFields", "specifyFields" );
+      }
+    };
+    Map<String, String> setterMap = new HashMap<String, String>();
+
+    FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
+        new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );
+
+    Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+    attrValidatorMap.put( "fieldStream", stringArrayLoadSaveValidator );
+    attrValidatorMap.put( "fieldDatabase", stringArrayLoadSaveValidator );
+
+    Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+
+    loadSaveTester =
+        new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
+            getterMap, setterMap, attrValidatorMap, typeValidatorMap, this );
+  }
+
+  // Call the allocate method on the LoadSaveTester meta class
+  @Override
+  public void modify( StepMetaInterface someMeta ) {
+    if ( someMeta instanceof TableOutputMeta ) {
+      ( (TableOutputMeta) someMeta ).allocate( 5 );
+    }
+  }
+
+  @Test
+  public void testSerialization() throws KettleException {
+    loadSaveTester.testSerialization();
+  }
+
+}
diff --git a/engine/test-src/org/pentaho/di/trans/steps/transexecutor/TransExecutorMetaTest.java b/engine/test-src/org/pentaho/di/trans/steps/transexecutor/TransExecutorMetaTest.java
index 2bc17c53dc07..149cf3dad6fe 100644
--- a/engine/test-src/org/pentaho/di/trans/steps/transexecutor/TransExecutorMetaTest.java
+++ b/engine/test-src/org/pentaho/di/trans/steps/transexecutor/TransExecutorMetaTest.java
@@ -100,13 +100,8 @@ public void setUp() throws Exception {
   }
 
   @Test
-  public void testLoadSaveXML() throws KettleException {
-    loadSaveTester.testXmlRoundTrip();
-  }
-
-  @Test
-  public void testLoadSaveRepo() throws KettleException {
-    loadSaveTester.testRepoRoundTrip();
+  public void testSerialization() throws KettleException {
+    loadSaveTester.testSerialization();
   }
 
 
diff --git a/engine/test-src/org/pentaho/di/trans/steps/uniquerows/UniqueRowsMetaTest.java b/engine/test-src/org/pentaho/di/trans/steps/uniquerows/UniqueRowsMetaTest.java
index ed7ec438afb1..62a210a481e1 100644
--- a/engine/test-src/org/pentaho/di/trans/steps/uniquerows/UniqueRowsMetaTest.java
+++ b/engine/test-src/org/pentaho/di/trans/steps/uniquerows/UniqueRowsMetaTest.java
@@ -2,7 +2,7 @@
  *
  * Pentaho Data Integration
  *
- * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
  *
  *******************************************************************************
  *
@@ -76,7 +76,6 @@ public void testRoundTrip() throws KettleException {
       new LoadSaveTester( UniqueRowsMeta.class, attributes, getterMap, setterMap,
         fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() );
 
-    loadSaveTester.testRepoRoundTrip();
-    loadSaveTester.testXmlRoundTrip();
+    loadSaveTester.testSerialization();
   }
 }
diff --git a/engine/test-src/org/pentaho/di/trans/steps/uniquerowsbyhashset/UniqueRowsByHashSetMetaTest.java b/engine/test-src/org/pentaho/di/trans/steps/uniquerowsbyhashset/UniqueRowsByHashSetMetaTest.java
index b31e9218b87c..1221de2d0c5e 100644
--- a/engine/test-src/org/pentaho/di/trans/steps/uniquerowsbyhashset/UniqueRowsByHashSetMetaTest.java
+++ b/engine/test-src/org/pentaho/di/trans/steps/uniquerowsbyhashset/UniqueRowsByHashSetMetaTest.java
@@ -2,7 +2,7 @@
  *
  * Pentaho Data Integration
  *
- * Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
  *
  *******************************************************************************
  *
@@ -66,7 +66,6 @@ public void testRoundTrip() throws KettleException {
       new LoadSaveTester( UniqueRowsByHashSetMeta.class, attributes, getterMap, setterMap,
         fieldLoadSaveValidatorAttributeMap, new HashMap<String, FieldLoadSaveValidator<?>>() );
 
-    loadSaveTester.testRepoRoundTrip();
-    loadSaveTester.testXmlRoundTrip();
+    loadSaveTester.testSerialization();
   }
 }
diff --git a/engine/test-src/org/pentaho/di/trans/steps/univariatestats/UnivariateStatsMetaTest.java b/engine/test-src/org/pentaho/di/trans/steps/univariatestats/UnivariateStatsMetaTest.java
index 44a051044a8e..70a963d46a77 100644
--- a/engine/test-src/org/pentaho/di/trans/steps/univariatestats/UnivariateStatsMetaTest.java
+++ b/engine/test-src/org/pentaho/di/trans/steps/univariatestats/UnivariateStatsMetaTest.java
@@ -2,7 +2,7 @@
  *
  * Pentaho Data Integration
  *
- * Copyright (C) 2002-2014 by Pentaho : http://www.pentaho.com
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
  *
  *******************************************************************************
  *
@@ -144,8 +144,7 @@ public void loadSaveRoundTripTest() throws KettleException {
         new HashMap<String, String>(), new HashMap<String, FieldLoadSaveValidator<?>>(),
         fieldLoadSaveValidatorTypeMap );
 
-    loadSaveTester.testRepoRoundTrip();
-    loadSaveTester.testXmlRoundTrip();
+    loadSaveTester.testSerialization();
   }
 
   private void assertContains( Map<String, Integer> map, String key, Integer value ) {
diff --git a/engine/test-src/org/pentaho/di/trans/steps/webservices/WebServiceMetaLoadSaveTest.java b/engine/test-src/org/pentaho/di/trans/steps/webservices/WebServiceMetaLoadSaveTest.java
new file mode 100644
index 000000000000..6461ae85211b
--- /dev/null
+++ b/engine/test-src/org/pentaho/di/trans/steps/webservices/WebServiceMetaLoadSaveTest.java
@@ -0,0 +1,101 @@
+/*! ******************************************************************************
+ *
+ * Pentaho Data Integration
+ *
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
+ *
+ *******************************************************************************
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+package org.pentaho.di.trans.steps.webservices;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.junit.Before;
+import org.junit.Test;
+import org.pentaho.di.core.KettleEnvironment;
+import org.pentaho.di.core.exception.KettleException;
+import org.pentaho.di.core.plugins.PluginRegistry;
+import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
+import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
+import org.pentaho.di.trans.steps.loadsave.validator.ListLoadSaveValidator;
+import org.pentaho.di.trans.steps.webservices.wsdl.XsdType;
+
+public class WebServiceMetaLoadSaveTest {
+  LoadSaveTester loadSaveTester;
+  Class<WebServiceMeta> testMetaClass = WebServiceMeta.class;
+
+  @Before
+  public void setUpLoadSave() throws Exception {
+    KettleEnvironment.init();
+    PluginRegistry.init( true );
+    List<String> attributes =
+        Arrays.asList( "url", "operationName", "operationRequestName", "operationNamespace", "inFieldContainerName",
+            "inFieldArgumentName", "outFieldContainerName", "outFieldArgumentName", "proxyHost", "proxyPort", "httpLogin",
+            "httpPassword", "passingInputData", "callStep", "compatible", "repeatingElementName", "returningReplyAsString",
+            "fieldsIn", "fieldsOut" );
+
+    Map<String, String> getterMap = new HashMap<String, String>();
+    Map<String, String> setterMap = new HashMap<String, String>();
+
+    Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+    attrValidatorMap.put( "fieldsIn",
+        new ListLoadSaveValidator<WebServiceField>( new WebServiceFieldLoadSaveValidator(), 5 ) );
+    attrValidatorMap.put( "fieldsOut",
+        new ListLoadSaveValidator<WebServiceField>( new WebServiceFieldLoadSaveValidator(), 5 ) );
+
+    Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+
+    loadSaveTester =
+        new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
+  }
+
+  @Test
+  public void testSerialization() throws KettleException {
+    loadSaveTester.testSerialization();
+  }
+
+  public class WebServiceFieldLoadSaveValidator implements FieldLoadSaveValidator<WebServiceField> {
+    final Random rand = new Random();
+    @Override
+    public WebServiceField getTestObject() {
+      WebServiceField rtn = new WebServiceField();
+      rtn.setName( UUID.randomUUID().toString() );
+      rtn.setWsName( UUID.randomUUID().toString() );
+      rtn.setXsdType( XsdType.TYPES[ rand.nextInt( XsdType.TYPES.length )] );
+      return rtn;
+    }
+
+    @Override
+    public boolean validateTestObject( WebServiceField testObject, Object actual ) {
+      if ( !( actual instanceof WebServiceField ) ) {
+        return false;
+      }
+      WebServiceField another = (WebServiceField) actual;
+      return new EqualsBuilder()
+          .append( testObject.getName(), another.getName() )
+          .append( testObject.getWsName(), another.getWsName() )
+          .append( testObject.getXsdType(), another.getXsdType() )
+          .isEquals();
+    }
+  }
+
+}
diff --git a/engine/test-src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMetaTest.java b/engine/test-src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMetaTest.java
new file mode 100644
index 000000000000..f7e4277e8a39
--- /dev/null
+++ b/engine/test-src/org/pentaho/di/trans/steps/xbaseinput/XBaseInputMetaTest.java
@@ -0,0 +1,66 @@
+/*! ******************************************************************************
+ *
+ * Pentaho Data Integration
+ *
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
+ *
+ *******************************************************************************
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+package org.pentaho.di.trans.steps.xbaseinput;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.pentaho.di.core.KettleEnvironment;
+import org.pentaho.di.core.exception.KettleException;
+import org.pentaho.di.core.plugins.PluginRegistry;
+import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
+import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
+
+public class XBaseInputMetaTest {
+  LoadSaveTester loadSaveTester;
+  Class<XBaseInputMeta> testMetaClass = XBaseInputMeta.class;
+
+  @Before
+  public void setUpLoadSave() throws Exception {
+    KettleEnvironment.init();
+    PluginRegistry.init( true );
+    List<String> attributes =
+        Arrays.asList( "dbfFileName", "rowLimit", "rowNrAdded", "rowNrField", "acceptingFilenames", "acceptingField",
+            "acceptingStepName", "includeFilename", "filenameField", "charactersetName" );
+
+    Map<String, String> getterMap = new HashMap<String, String>();
+    getterMap.put( "includeFilename", "includeFilename" );
+
+    Map<String, String> setterMap = new HashMap<String, String>();
+
+    Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+
+    Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+
+    loadSaveTester =
+        new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
+  }
+
+  @Test
+  public void testSerialization() throws KettleException {
+    loadSaveTester.testSerialization();
+  }
+}
diff --git a/engine/test-src/org/pentaho/di/trans/steps/zipfile/ZipFileMetaLoadSaveTest.java b/engine/test-src/org/pentaho/di/trans/steps/zipfile/ZipFileMetaLoadSaveTest.java
new file mode 100644
index 000000000000..9023df140506
--- /dev/null
+++ b/engine/test-src/org/pentaho/di/trans/steps/zipfile/ZipFileMetaLoadSaveTest.java
@@ -0,0 +1,69 @@
+/*! ******************************************************************************
+ *
+ * Pentaho Data Integration
+ *
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
+ *
+ *******************************************************************************
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ ******************************************************************************/
+package org.pentaho.di.trans.steps.zipfile;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.Before;
+import org.junit.Test;
+import org.pentaho.di.core.KettleEnvironment;
+import org.pentaho.di.core.exception.KettleException;
+import org.pentaho.di.core.plugins.PluginRegistry;
+import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
+import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
+import org.pentaho.di.trans.steps.loadsave.validator.IntLoadSaveValidator;
+
+public class ZipFileMetaLoadSaveTest {
+  LoadSaveTester loadSaveTester;
+  Class<ZipFileMeta> testMetaClass = ZipFileMeta.class;
+
+  @Before
+  public void setUpLoadSave() throws Exception {
+    KettleEnvironment.init();
+    PluginRegistry.init( true );
+    List<String> attributes =
+        Arrays.asList( "dynamicSourceFileNameField", "dynamicTargetFileNameField", "baseFolderField", "moveToFolderField", "addResultFilenames",
+            "overwriteZipEntry", "createParentFolder", "keepSouceFolder", "operationType" );
+
+    Map<String, String> getterMap = new HashMap<String, String>();
+    getterMap.put( "addResultFilenames", "isaddTargetFileNametoResult" );
+    Map<String, String> setterMap = new HashMap<String, String>();
+    setterMap.put( "addResultFilenames", "setaddTargetFileNametoResult" );
+
+    Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+    attrValidatorMap.put( "operationType", new IntLoadSaveValidator( ZipFileMeta.operationTypeCode.length ) );
+
+    Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
+
+    loadSaveTester =
+        new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
+  }
+
+  @Test
+  public void testSerialization() throws KettleException {
+    loadSaveTester.testSerialization();
+  }
+
+}