[TEST] - Add more load/save tests, move ConditionLoadSaveTester for other tests
mbatchelor committed May 24, 2016
1 parent cb51bdb commit bcfd980
Showing 17 changed files with 756 additions and 121 deletions.
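A note on what these load/save tests check: every step's *Meta class has to write its settings out (getXML()/saveRep()) and read them back (loadXML()/readRep()) without dropping or reordering anything, and LoadSaveTester drives that round trip with randomized field values. The sketch below is a minimal, framework-free illustration of the same round-trip idea — FakeStepMeta and its fields are hypothetical stand-ins, not the Kettle API.

import java.util.Arrays;
import java.util.Random;
import java.util.UUID;

// Hypothetical stand-in for a step meta class; not part of Kettle.
class FakeStepMeta {
  String prefix;
  String[] groupField;

  // "Save": flatten the settings to a simple line-based form.
  String toText() {
    return prefix + "\n" + String.join( ",", groupField );
  }

  // "Load": rebuild the settings from that form.
  static FakeStepMeta fromText( String text ) {
    String[] lines = text.split( "\n", 2 );
    FakeStepMeta meta = new FakeStepMeta();
    meta.prefix = lines[0];
    meta.groupField = lines[1].split( "," );
    return meta;
  }
}

public class RoundTripSketch {
  public static void main( String[] args ) {
    Random rand = new Random();

    // Populate with random values, as a load/save tester would.
    FakeStepMeta original = new FakeStepMeta();
    original.prefix = UUID.randomUUID().toString();
    original.groupField = new String[ 1 + rand.nextInt( 5 ) ];
    for ( int i = 0; i < original.groupField.length; i++ ) {
      original.groupField[i] = UUID.randomUUID().toString();
    }

    // Round-trip and compare field by field.
    FakeStepMeta reloaded = FakeStepMeta.fromText( original.toText() );
    if ( !original.prefix.equals( reloaded.prefix )
        || !Arrays.equals( original.groupField, reloaded.groupField ) ) {
      throw new AssertionError( "settings did not survive the save/load round trip" );
    }
    System.out.println( "round trip OK" );
  }
}

The real tests in this commit do the equivalent through the Kettle XML and repository code paths, with per-field validators supplying the random values.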
28 changes: 27 additions & 1 deletion engine/src/org/pentaho/di/trans/steps/groupby/GroupByMeta.java
@@ -302,6 +302,7 @@ public void setValueField( String[] valueField ) {
    this.valueField = valueField;
  }

+  @Override
  public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
    readData( stepnode );
  }
@@ -314,8 +315,25 @@ public void allocate( int sizegroup, int nrfields ) {
    valueField = new String[ nrfields ];
  }

+  @Override
  public Object clone() {
-    Object retval = super.clone();
+    GroupByMeta retval = (GroupByMeta) super.clone();
+
+    int szGroup = 0, szFields = 0;
+    if ( groupField != null ) {
+      szGroup = groupField.length;
+    }
+    if ( valueField != null ) {
+      szFields = valueField.length;
+    }
+    retval.allocate( szGroup, szFields );
+
+    System.arraycopy( groupField, 0, retval.groupField, 0, szGroup );
+    System.arraycopy( aggregateField, 0, retval.aggregateField, 0, szFields );
+    System.arraycopy( subjectField, 0, retval.subjectField, 0, szFields );
+    System.arraycopy( aggregateType, 0, retval.aggregateType, 0, szFields );
+    System.arraycopy( valueField, 0, retval.valueField, 0, szFields );
+
    return retval;
  }

@@ -399,6 +417,7 @@ public static final String getTypeDescLong( int i ) {
    return typeGroupLongDesc[ i ];
  }

+  @Override
  public void setDefault() {
    directory = "%%java.io.tmpdir%%";
    prefix = "grp";
@@ -413,6 +432,7 @@ public void setDefault() {
    allocate( sizeGroup, numberOfFields );
  }

+  @Override
  public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
    VariableSpace space, Repository repository, IMetaStore metaStore ) {
    // re-assemble a new row of metadata
@@ -523,6 +543,7 @@ public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface
    rowMeta.addRowMeta( fields );
  }

+  @Override
  public String getXML() {
    StringBuilder retval = new StringBuilder( 500 );

@@ -557,6 +578,7 @@ public String getXML() {
    return retval.toString();
  }

+  @Override
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
    throws KettleException {
    try {
@@ -597,6 +619,7 @@ public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, Lis
    }
  }

+  @Override
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
    throws KettleException {
    try {
@@ -626,6 +649,7 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform
    }
  }

+  @Override
  public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
    Repository repository, IMetaStore metaStore ) {
@@ -644,11 +668,13 @@ public void check( List<CheckResultInterface> remarks, TransMeta transMeta, Step
    }
  }

+  @Override
  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
    TransMeta transMeta, Trans trans ) {
    return new GroupBy( stepMeta, stepDataInterface, cnr, transMeta, trans );
  }

+  @Override
  public StepDataInterface getStepData() {
    return new GroupByData();
  }
@@ -78,6 +78,7 @@ public InfobrightLoaderMeta() {
   * @see org.pentaho.di.trans.step.StepMetaInterface#getStep(org.pentaho.di.trans.step.StepMeta,
   * org.pentaho.di.trans.step.StepDataInterface, int, org.pentaho.di.trans.TransMeta, org.pentaho.di.trans.Trans)
   */
+  @Override
  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
    Trans trans ) {
    InfobrightLoader loader = new InfobrightLoader( stepMeta, stepDataInterface, cnr, tr, trans );
@@ -89,6 +90,7 @@ public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInter
   *
   * @see org.pentaho.di.trans.step.StepMetaInterface#getStepData()
   */
+  @Override
  public StepDataInterface getStepData() {
    return new InfobrightLoaderData();
  }
@@ -98,6 +100,7 @@ public StepDataInterface getStepData() {
   *
   * @see org.pentaho.di.trans.step.BaseStepMeta#clone()
   */
+  @Override
  public Object clone() {
    InfobrightLoaderMeta retval = (InfobrightLoaderMeta) super.clone();
    return retval;
@@ -111,6 +114,11 @@ public void setDataFormat( DataFormat dataFormat ) {
    this.dataFormat = dataFormat;
  }

+  public DataFormat getDataFormat() {
+    return this.dataFormat;
+  }
+
+  @Override
  public void setDefault() {
    this.dataFormat = DataFormat.TXT_VARIABLE; // default for ICE
    // this.dataFormat = DataFormat.BINARY; // default for IEE
@@ -133,6 +133,7 @@ public IngresVectorwiseLoaderMeta() {
   * @see org.pentaho.di.trans.step.StepMetaInterface#getStep(org.pentaho.di.trans.step.StepMeta,
   * org.pentaho.di.trans.step.StepDataInterface, int, org.pentaho.di.trans.TransMeta, org.pentaho.di.trans.Trans)
   */
+  @Override
  public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
    Trans trans ) {
    IngresVectorwiseLoader loader = new IngresVectorwiseLoader( stepMeta, stepDataInterface, cnr, tr, trans );
@@ -144,6 +145,7 @@ public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInter
   *
   * @see org.pentaho.di.trans.step.StepMetaInterface#getStepData()
   */
+  @Override
  public StepDataInterface getStepData() {
    return new IngresVectorwiseLoaderData();
  }
@@ -153,11 +155,13 @@ public StepDataInterface getStepData() {
   *
   * @see org.pentaho.di.trans.step.BaseStepMeta#clone()
   */
+  @Override
  public Object clone() {
    IngresVectorwiseLoaderMeta retval = (IngresVectorwiseLoaderMeta) super.clone();
    return retval;
  }

+  @Override
  public void setDefault() {
    allocate( 0 );
    sqlPath = "/opt/Ingres/IngresVW/ingres/bin/sql";
@@ -193,6 +197,7 @@ public void allocate( int nrRows ) {
    fieldDatabase = new String[nrRows];
  }

+  @Override
  public String getXML() {
    StringBuilder retval = new StringBuilder();

@@ -268,6 +273,7 @@ public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore met
    }
  }

+  @Override
  public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
    try {
      databaseMeta = rep.loadDatabaseMetaFromStepAttribute( id_step, "id_connection", databases );
@@ -303,6 +309,7 @@ public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, Lis
    }
  }

+  @Override
  public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
    try {
      rep.saveDatabaseMetaStepAttribute( id_transformation, id_step, "id_connection", databaseMeta );
@@ -343,6 +350,7 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform
  /**
   * @return the databaseMeta
   */
+  @Override
  public DatabaseMeta getDatabaseMeta() {
    return databaseMeta;
  }
@@ -358,6 +366,7 @@ public void setDatabaseMeta( DatabaseMeta databaseMeta ) {
  /**
   * @return the tablename
   */
+  @Override
  public String getTableName() {
    return tablename;
  }
@@ -415,6 +424,7 @@ public void setFieldFormat( String[] fieldFormat ) {
    this.fieldFormat = fieldFormat;
  }

+  @Override
  public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
    Repository repository, IMetaStore metaStore ) {
    SQLStatement retval = new SQLStatement( stepMeta.getName(), databaseMeta, null ); // default:
@@ -651,4 +661,14 @@ public String getSchemaName() {
  public String getMissingDatabaseConnectionInformationMessage() {
    return null;
  }
+
+  @Override
+  public DatabaseMeta[] getUsedDatabaseConnections() {
+    if ( databaseMeta != null ) {
+      return new DatabaseMeta[] { databaseMeta };
+    } else {
+      return super.getUsedDatabaseConnections();
+    }
+  }
+
}
@@ -25,16 +25,14 @@
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Random;
-import java.util.UUID;

import org.junit.Before;
import org.junit.Test;
-import org.pentaho.di.core.Condition;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
+import org.pentaho.di.trans.steps.loadsave.validator.ConditionLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;

public class FilterRowsMetaTest {
@@ -65,31 +63,4 @@ public void testSerialization() throws KettleException {
    loadSaveTester.testSerialization();
  }

-  public class ConditionLoadSaveValidator implements FieldLoadSaveValidator<Condition> {
-    final Random rand = new Random();
-    @Override
-    public Condition getTestObject() {
-      Condition rtn = new Condition();
-      rtn.setFunction( rand.nextInt( Condition.functions.length ) );
-      rtn.setLeftValuename( UUID.randomUUID().toString() );
-      rtn.setNegated( rand.nextBoolean() );
-      rtn.setOperator( rand.nextInt( Condition.operators.length ) );
-      rtn.setRightValuename( UUID.randomUUID().toString() );
-      return rtn;
-    }
-
-    @Override
-    public boolean validateTestObject( Condition testObject, Object actual ) {
-      if ( !( actual instanceof Condition ) ) {
-        return false;
-      }
-      Condition another = (Condition) actual;
-      try {
-        return ( testObject.getXML().equals( another.getXML() ) );
-      } catch ( KettleException ex ) {
-        throw new RuntimeException( ex );
-      }
-    }
-  }
-
}
@@ -0,0 +1,95 @@
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.groupby;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.initializer.InitializerInterface;
import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.IntLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.PrimitiveIntArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;

public class GroupByMetaTest implements InitializerInterface<StepMetaInterface> {
LoadSaveTester loadSaveTester;
Class<GroupByMeta> testMetaClass = GroupByMeta.class;

@Before
public void setUpLoadSave() throws Exception {
KettleEnvironment.init();
PluginRegistry.init( true );
List<String> attributes =
Arrays.asList( "passAllRows", "directory", "prefix", "aggregateIgnored", "aggregateIgnoredField", "addingLineNrInGroup",
"lineNrInGroupField", "alwaysGivingBackOneRow", "groupField", "aggregateField", "subjectField",
"aggregateType", "valueField" );

Map<String, String> getterMap = new HashMap<String, String>() {
{
put( "passAllRows", "passAllRows" );
}
};
Map<String, String> setterMap = new HashMap<String, String>();

FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );


Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
attrValidatorMap.put( "groupField", stringArrayLoadSaveValidator );
attrValidatorMap.put( "aggregateField", stringArrayLoadSaveValidator );
attrValidatorMap.put( "subjectField", stringArrayLoadSaveValidator );
attrValidatorMap.put( "valueField", stringArrayLoadSaveValidator );
attrValidatorMap.put( "aggregateType",
new PrimitiveIntArrayLoadSaveValidator( new IntLoadSaveValidator( GroupByMeta.typeGroupCode.length ), 5 ) );

Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

loadSaveTester =
new LoadSaveTester( testMetaClass, attributes, new ArrayList<String>(), new ArrayList<String>(),
getterMap, setterMap, attrValidatorMap, typeValidatorMap, this );
}

// Call the allocate method on the LoadSaveTester meta class
@Override
public void modify( StepMetaInterface someMeta ) {
if ( someMeta instanceof GroupByMeta ) {
( (GroupByMeta) someMeta ).allocate( 5, 5 );
}
}

@Test
public void testSerialization() throws KettleException {
loadSaveTester.testSerialization();
}
}
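
The most substantial production-code change in this commit is the clone() rewrite in GroupByMeta (first file above): the old clone() returned the result of super.clone() as-is, which left the copy sharing its array fields with the original, while the new version allocates fresh arrays and copies every array field across. Below is a short, self-contained sketch of that defensive-copy pattern — SampleMeta and its fields are hypothetical, not the Pentaho class.

import java.util.Arrays;

// Hypothetical class illustrating the allocate-and-copy clone pattern; not the Pentaho GroupByMeta.
public class SampleMeta implements Cloneable {
  private String[] groupField;
  private String[] valueField;

  public void allocate( int sizeGroup, int nrFields ) {
    groupField = new String[ sizeGroup ];
    valueField = new String[ nrFields ];
  }

  public void setFields( String[] group, String[] value ) {
    groupField = group;
    valueField = value;
  }

  @Override
  public SampleMeta clone() {
    try {
      SampleMeta retval = (SampleMeta) super.clone();
      int szGroup = ( groupField == null ) ? 0 : groupField.length;
      int szFields = ( valueField == null ) ? 0 : valueField.length;

      // Give the copy its own arrays, then copy the contents across,
      // so later edits to the clone cannot leak back into this instance.
      retval.allocate( szGroup, szFields );
      System.arraycopy( groupField, 0, retval.groupField, 0, szGroup );
      System.arraycopy( valueField, 0, retval.valueField, 0, szFields );
      return retval;
    } catch ( CloneNotSupportedException e ) {
      throw new AssertionError( e ); // cannot happen: this class implements Cloneable
    }
  }

  public static void main( String[] args ) {
    SampleMeta original = new SampleMeta();
    original.setFields( new String[] { "country" }, new String[] { "total" } );

    SampleMeta copy = original.clone();
    copy.groupField[0] = "city";

    // The original is untouched because the arrays were copied, not shared.
    System.out.println( Arrays.toString( original.groupField ) ); // [country]
    System.out.println( Arrays.toString( copy.groupField ) );     // [city]
  }
}

Without the copy step, the clone and the original would point at the same array objects, so editing one step's grouping fields would silently change the other.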
