Commit

[TEST] - More load/save tests
mbatchelor committed May 25, 2016
1 parent e57915d commit d3a348f
Showing 11 changed files with 620 additions and 49 deletions.
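
For context on the files below: Kettle's load/save tests round-trip a step's metadata through its XML and repository serialization (getXML()/loadXML(), saveRep()/readRep()) and check each declared attribute with a FieldLoadSaveValidator. Judging from the validator implementations added in this commit, the contract amounts to the sketch below (a paraphrase for orientation, not the repository's exact source):

// Sketch of the FieldLoadSaveValidator contract as implied by the new
// validator classes in this commit; not copied from the repository.
public interface FieldLoadSaveValidator<T> {
  // Produce a randomized value to set on the meta class before saving.
  T getTestObject();

  // Return true when the value read back after the save/load round trip
  // matches the test object that was written.
  boolean validateTestObject( T testObject, Object actual );
}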
2 changes: 1 addition & 1 deletion engine/src/org/pentaho/di/trans/steps/mail/MailMeta.java
@@ -2,7 +2,7 @@
*
* Pentaho Data Integration
*
- * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
+ * Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
@@ -159,14 +159,17 @@ public MetaInjectMeta() {
}

// TODO: deep copy
@Override
public Object clone() {
Object retval = super.clone();
return retval;
}

@Override
public void setDefault() {
}

@Override
public String getXML() {
actualizeMetaInjectMapping();
StringBuilder retval = new StringBuilder( 500 );
@@ -196,8 +199,10 @@ public String getXML() {
retval.append( " " ).append( XMLHandler.addTagValue( TARGET_FILE, targetFile ) );
retval.append( " " ).append( XMLHandler.addTagValue( NO_EXECUTION, noExecution ) );

retval.append( " " ).append( XMLHandler.addTagValue( STREAM_SOURCE_STEP, streamSourceStep == null ? null
: streamSourceStep.getName() ) );
if ( streamSourceStepname == null ) {
streamSourceStepname = streamSourceStep.getName();
}
retval.append( " " ).append( XMLHandler.addTagValue( STREAM_SOURCE_STEP, streamSourceStepname ) );
retval.append( " " ).append( XMLHandler.addTagValue( STREAM_TARGET_STEP, streamTargetStepname ) );

retval.append( " " ).append( XMLHandler.openTag( MAPPINGS ) );
@@ -216,6 +221,7 @@ public String getXML() {
return retval.toString();
}

@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
try {

@@ -266,6 +272,7 @@ public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore met
}
}

@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
throws KettleException {
try {
@@ -278,6 +285,8 @@ public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, Lis
directoryPath = rep.getStepAttributeString( id_step, DIRECTORY_PATH );

sourceStepName = rep.getStepAttributeString( id_step, SOURCE_STEP );
streamSourceStepname = rep.getStepAttributeString( id_step, STREAM_SOURCE_STEP );
streamTargetStepname = rep.getStepAttributeString( id_step, STREAM_TARGET_STEP );
sourceOutputFields = new ArrayList<MetaInjectOutputField>();
int nrSourceOutputFields = rep.countNrStepAttributes( id_step, SOURCE_OUTPUT_FIELD_NAME );
for ( int i = 0; i < nrSourceOutputFields; i++ ) {
@@ -308,6 +317,7 @@ public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, Lis
}
}

@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
throws KettleException {
try {
@@ -320,11 +330,15 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform
rep.saveStepAttribute( id_transformation, id_step, DIRECTORY_PATH, directoryPath );

rep.saveStepAttribute( id_transformation, id_step, SOURCE_STEP, sourceStepName );
- for ( MetaInjectOutputField field : sourceOutputFields ) {
- rep.saveStepAttribute( id_transformation, id_step, SOURCE_OUTPUT_FIELD_NAME, field.getName() );
- rep.saveStepAttribute( id_transformation, id_step, SOURCE_OUTPUT_FIELD_TYPE, field.getTypeDescription() );
- rep.saveStepAttribute( id_transformation, id_step, SOURCE_OUTPUT_FIELD_LENGTH, field.getLength() );
- rep.saveStepAttribute( id_transformation, id_step, SOURCE_OUTPUT_FIELD_PRECISION, field.getPrecision() );
+ rep.saveStepAttribute( id_transformation, id_step, STREAM_SOURCE_STEP, streamSourceStepname );
+ rep.saveStepAttribute( id_transformation, id_step, STREAM_TARGET_STEP, streamTargetStepname );
+ MetaInjectOutputField aField = null;
+ for ( int i = 0; i < sourceOutputFields.size(); i++ ) {
+ aField = sourceOutputFields.get( i );
+ rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_NAME, aField.getName() );
+ rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_TYPE, aField.getTypeDescription() );
+ rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_LENGTH, aField.getLength() );
+ rep.saveStepAttribute( id_transformation, id_step, i, SOURCE_OUTPUT_FIELD_PRECISION, aField.getPrecision() );
}

rep.saveStepAttribute( id_transformation, id_step, TARGET_FILE, targetFile );
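
The switch above to the indexed saveStepAttribute overloads pairs each MetaInjectOutputField with a row index, so the entries can be read back unambiguously from the repository. The matching read-back loop is collapsed in this diff view; the usual Kettle pattern for indexed step attributes looks roughly like the hedged sketch below (illustrative only, not the elided readRep body):

// Illustrative sketch of the standard indexed read-back pattern; the actual
// readRep() body is collapsed in the hunk above.
int nrFields = rep.countNrStepAttributes( id_step, SOURCE_OUTPUT_FIELD_NAME );
for ( int i = 0; i < nrFields; i++ ) {
  String name = rep.getStepAttributeString( id_step, i, SOURCE_OUTPUT_FIELD_NAME );
  String type = rep.getStepAttributeString( id_step, i, SOURCE_OUTPUT_FIELD_TYPE );
  int length = (int) rep.getStepAttributeInteger( id_step, i, SOURCE_OUTPUT_FIELD_LENGTH );
  int precision = (int) rep.getStepAttributeInteger( id_step, i, SOURCE_OUTPUT_FIELD_PRECISION );
  // ...build a MetaInjectOutputField from these values and add it to sourceOutputFields.
}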
@@ -346,6 +360,7 @@ public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transform
}
}

@Override
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

@@ -361,11 +376,13 @@ public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface
}
}

@Override
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr,
Trans trans ) {
return new MetaInject( stepMeta, stepDataInterface, cnr, tr, trans );
}

@Override
public StepDataInterface getStepData() {
return new MetaInjectData();
}
@@ -668,6 +685,7 @@ public void setNoExecution( boolean noExecution ) {
/**
* @return The objects referenced in the step, like a mapping, a transformation, a job, ...
*/
@Override
public String[] getReferencedObjectDescriptions() {
return new String[] { BaseMessages.getString( PKG, "MetaInjectMeta.ReferencedObject.Description" ), };
}
@@ -677,6 +695,7 @@ private boolean isTransformationDefined() {
.isEmpty( transName ) );
}

@Override
public boolean[] isReferencedObjectEnabled() {
return new boolean[] { isTransformationDefined(), };
}
@@ -686,6 +705,7 @@ public String getActiveReferencedObjectDescription() {
return BaseMessages.getString( PKG, "MetaInjectMeta.ReferencedObjectAfterInjection.Description" );
}

@Override
@Deprecated
public Object loadReferencedObject( int index, Repository rep, VariableSpace space ) throws KettleException {
return loadReferencedObject( index, rep, null, space );
@@ -707,6 +727,7 @@ public Object loadReferencedObject( int index, Repository rep, VariableSpace spa
* @return the referenced object once loaded
* @throws KettleException
*/
@Override
public Object loadReferencedObject( int index, Repository rep, IMetaStore metaStore, VariableSpace space )
throws KettleException {
return loadTransformationMeta( this, rep, metaStore, space );
@@ -127,7 +127,7 @@ public ExcelInputField getTestObject() {

@Override
public boolean validateTestObject( ExcelInputField testObject, Object actual ) {
- if ( !( actual instanceof ExcelInputField) ) {
+ if ( !( actual instanceof ExcelInputField ) ) {
return false;
}
ExcelInputField another = (ExcelInputField) actual;
@@ -0,0 +1,45 @@
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.loadsave.validator;

import java.util.Random;
import java.util.UUID;

import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.StringObjectId;

public class ObjectIdLoadSaveValidator implements FieldLoadSaveValidator<ObjectId> {
final Random rand = new Random();
@Override
public ObjectId getTestObject() {
return new StringObjectId( UUID.randomUUID().toString() );
}

@Override
public boolean validateTestObject( ObjectId testObject, Object actual ) {
if ( !( actual instanceof ObjectId ) ) {
return false;
}
ObjectId actualInput = (ObjectId) actual;
return ( testObject.equals( actualInput ) );
}
}
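
The new validator above slots into the same LoadSaveTester setup used by MailValidatorMetaTest later in this commit. A hypothetical registration for a step meta with an ObjectId-typed attribute might look like the following; SomeStepMeta and the attribute name "transObjectId" are invented for illustration and do not come from this commit:

// Hypothetical wiring -- SomeStepMeta and "transObjectId" are invented names;
// the constructor arguments mirror the MailValidatorMetaTest below.
Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
attrValidatorMap.put( "transObjectId", new ObjectIdLoadSaveValidator() );

LoadSaveTester loadSaveTester =
  new LoadSaveTester( SomeStepMeta.class, Arrays.asList( "transObjectId" ),
    new HashMap<String, String>(), new HashMap<String, String>(),
    attrValidatorMap, new HashMap<String, FieldLoadSaveValidator<?>>() );
loadSaveTester.testSerialization();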
@@ -0,0 +1,45 @@
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.loadsave.validator;

import java.util.Random;

import org.pentaho.di.core.ObjectLocationSpecificationMethod;

public class ObjectLocationSpecificationMethodLoadSaveValidator implements FieldLoadSaveValidator<ObjectLocationSpecificationMethod> {
final Random rand = new Random();
@Override
public ObjectLocationSpecificationMethod getTestObject() {
ObjectLocationSpecificationMethod[] methods = ObjectLocationSpecificationMethod.values();
ObjectLocationSpecificationMethod rtn = methods[ rand.nextInt( methods.length ) ];
return rtn;
}

@Override
public boolean validateTestObject( ObjectLocationSpecificationMethod testObject, Object actual ) {
if ( !( actual instanceof ObjectLocationSpecificationMethod ) ) {
return false;
}
ObjectLocationSpecificationMethod actualInput = (ObjectLocationSpecificationMethod) actual;
return ( testObject.equals( actualInput ) );
}
}
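
An enum like ObjectLocationSpecificationMethod is more naturally validated per type than per field name. Assuming the LoadSaveTester's typeValidatorMap is keyed by the field type's canonical class name, which is the convention other Kettle load/save tests appear to follow, registration would look roughly like:

// Assumed convention: typeValidatorMap keys are the canonical class names of field types.
Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
typeValidatorMap.put( ObjectLocationSpecificationMethod.class.getCanonicalName(),
  new ObjectLocationSpecificationMethodLoadSaveValidator() );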
@@ -0,0 +1,82 @@
/*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2016 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.mailvalidator;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.junit.Before;
import org.junit.Test;
import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.trans.steps.loadsave.LoadSaveTester;
import org.pentaho.di.trans.steps.loadsave.validator.ArrayLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.FieldLoadSaveValidator;
import org.pentaho.di.trans.steps.loadsave.validator.StringLoadSaveValidator;

public class MailValidatorMetaTest {
LoadSaveTester loadSaveTester;
Class<MailValidatorMeta> testMetaClass = MailValidatorMeta.class;

@Before
public void setUpLoadSave() throws Exception {
KettleEnvironment.init();
PluginRegistry.init( true );
List<String> attributes =
Arrays.asList( "emailField", "ResultAsString", "SMTPCheck", "eMailValideMsg", "eMailNotValideMsg",
"errorsField", "timeOut", "defaultSMTP", "emailSender", "defaultSMTPField", "isdynamicDefaultSMTP",
"resultFieldName" );

Map<String, String> getterMap = new HashMap<String, String>() {
{
put( "emailSender", "geteMailSender" );
put( "isdynamicDefaultSMTP", "isdynamicDefaultSMTP" );
}
};
Map<String, String> setterMap = new HashMap<String, String>() {
{
put( "emailField", "setEmailfield" );
put( "eMailValideMsg", "setEmailValideMsg" );
put( "eMailNotValideMsg", "setEmailNotValideMsg" );
put( "emailSender", "seteMailSender" );
put( "isdynamicDefaultSMTP", "setdynamicDefaultSMTP" );
}
};
FieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
new ArrayLoadSaveValidator<String>( new StringLoadSaveValidator(), 5 );


Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();

loadSaveTester =
new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
}

@Test
public void testSerialization() throws KettleException {
loadSaveTester.testSerialization();
}
}
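
One detail worth noting in the test above: stringArrayLoadSaveValidator is constructed but never put into attrValidatorMap, so all of the listed attributes fall back to the tester's default validators. Purely as an illustration of how those (currently empty) maps are meant to be used, and not as part of this commit, a per-attribute override would be registered like this:

// Illustration only, not part of this commit: overriding the default validator
// for one of the attributes listed above.
attrValidatorMap.put( "defaultSMTP", new StringLoadSaveValidator() );
// The array validator built earlier would be registered the same way if
// MailValidatorMeta had a String[] attribute (none is listed here):
// attrValidatorMap.put( "someStringArrayField", stringArrayLoadSaveValidator );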
