Java Code Examples for org.pentaho.di.trans.TransMeta#addStep()

The following examples show how to use org.pentaho.di.trans.TransMeta#addStep(). They are extracted from open source projects; you can go to the original project or source file by following the link above each example.
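Before the project examples, here is a minimal sketch of the pattern they all follow: wrap a step's metadata in a StepMeta, register it on the transformation with addStep(), and connect the registered steps afterwards with hops. The step names are hypothetical, and the sketch assumes only core Kettle classes that also appear in the examples below (DummyTransMeta is used as a stand-in step type).

import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.dummytrans.DummyTransMeta;

public class AddStepSketch {

  public static TransMeta buildTransMeta() {
    TransMeta transMeta = new TransMeta();
    transMeta.setName( "addStep sketch" );

    // Each step's metadata object is wrapped in a StepMeta before being added.
    StepMeta firstStep = new StepMeta( "First dummy", new DummyTransMeta() );
    transMeta.addStep( firstStep );

    StepMeta secondStep = new StepMeta( "Second dummy", new DummyTransMeta() );
    transMeta.addStep( secondStep );

    // addStep() only registers the step; hops between registered steps are added separately.
    transMeta.addTransHop( new TransHopMeta( firstStep, secondStep ) );

    return transMeta;
  }
}
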
Example 1
Source File: SpoonStepsDelegate.java    From pentaho-kettle with Apache License 2.0
public void dupeStep( TransMeta transMeta, StepMeta stepMeta ) {
  spoon.getLog().logDebug(
    toString(), BaseMessages.getString( PKG, "Spoon.Log.DuplicateStep" ) + stepMeta.getName() ); // Duplicate step:

  StepMeta stMeta = (StepMeta) stepMeta.clone();
  if ( stMeta != null ) {
    String newname = transMeta.getAlternativeStepname( stepMeta.getName() );
    int nr = 2;
    while ( transMeta.findStep( newname ) != null ) {
      newname = stepMeta.getName() + " (copy " + nr + ")";
      nr++;
    }
    stMeta.setName( newname );
    // Don't select this new step!
    stMeta.setSelected( false );
    Point loc = stMeta.getLocation();
    stMeta.setLocation( loc.x + 20, loc.y + 20 );
    transMeta.addStep( stMeta );
    spoon.addUndoNew( transMeta, new StepMeta[] { (StepMeta) stMeta.clone() }, new int[] { transMeta
      .indexOfStep( stMeta ) } );
    spoon.refreshTree();
    spoon.refreshGraph();
  }
}
 
Example 2
Source File: MappingInputFieldsTest.java    From pentaho-kettle with Apache License 2.0
@Before
public void setUp() throws Exception {
  meta = new MappingInputMeta();
  meta.setFieldName( new String[] { "n2", "n4" } );
  meta.setFieldType( new int[] { ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_INTEGER } );
  meta.setFieldLength( new int[] { 0, 0 } );
  meta.setFieldPrecision( new int[] { 0, 0 } );

  StepMeta sm = new StepMeta( "MappingInput", "SubTrans", meta );
  TransMeta tm = new TransMeta();
  tm.addStep( sm );
  LoggingObjectInterface loi = new SimpleLoggingObject( "lo", LoggingObjectType.STEP, null );
  Trans tr = new Trans( tm, loi );

  step = new MappingInput( sm, null, 0, tm, tr );
  step.getTrans().setRunning( true );
}
 
Example 3
Source File: GetRepositoryNamesTest.java    From pentaho-kettle with Apache License 2.0
private void init( Repository repository, String directoryName, boolean includeSubFolders, String nameMask, String excludeNameMask,
    ObjectTypeSelection typeSelection, int itemCount ) throws KettleException {

  VariableSpace vars = new Variables();
  vars.setVariable( "DirName", "/subdir1" );
  vars.setVariable( "IncludeMask", ".*" );
  vars.setVariable( "ExcludeMask", "" );

  GetRepositoryNamesMeta meta = new GetRepositoryNamesMeta();
  meta.setDirectory( new String[] { directoryName } );
  meta.setNameMask( new String[] { nameMask } );
  meta.setExcludeNameMask( new String[] { excludeNameMask } );
  meta.setIncludeSubFolders( new boolean[] { includeSubFolders } );
  meta.setObjectTypeSelection( typeSelection );
  StepMeta stepMeta = new StepMeta( "GetRepoNamesStep", meta );

  TransMeta transMeta = new TransMeta( vars );
  transMeta.setRepository( repository );
  transMeta.addStep( stepMeta );

  GetRepositoryNamesData data = (GetRepositoryNamesData) meta.getStepData();
  GetRepositoryNames step = new GetRepositoryNames( stepMeta, data, 0, transMeta, new Trans( transMeta ) );
  step.init( meta, data );
  assertNotNull( data.list );
  assertEquals( itemCount, data.list.size() );
}
 
Example 4
Source File: SharedObjectSyncUtilTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void synchronizeSteps() throws Exception {
  final String stepName = "SharedStep";
  TransMeta transformation1 = createTransMeta();
  StepMeta step1 = createStepMeta( stepName, true );
  transformation1.addStep( step1 );
  spoonDelegates.trans.addTransformation( transformation1 );

  TransMeta transformation2 = createTransMeta();
  StepMeta step2 = createStepMeta( stepName, true );
  transformation2.addStep( step2 );
  spoonDelegates.trans.addTransformation( transformation2 );

  step2.setDescription( AFTER_SYNC_VALUE );
  sharedUtil.synchronizeSteps( step2 );
  assertThat( step1.getDescription(), equalTo( AFTER_SYNC_VALUE ) );
}
 
Example 5
Source File: BaseStreamingDialog.java    From pentaho-kettle with Apache License 2.0
protected TransMeta createSubTransMeta() {
  RecordsFromStreamMeta rm = new RecordsFromStreamMeta();
  String[] fieldNames = getFieldNames();
  int[] empty = new int[ fieldNames.length ];
  Arrays.fill( empty, -1 );
  rm.setFieldname( fieldNames );
  rm.setType( getFieldTypes() );
  rm.setLength( empty );
  rm.setPrecision( empty );

  StepMeta recsFromStream = new StepMeta( "RecordsFromStream", "Get records from stream", rm );
  recsFromStream.setLocation( new Point( 100, 100 ) );
  recsFromStream.setDraw( true );

  TransMeta transMeta = new TransMeta();
  transMeta.addStep( recsFromStream );
  transMeta.setFilename( "" );

  return transMeta;
}
 
Example 6
Source File: TransMetaConverterTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testMultipleDisabledHops() {
  TransMeta trans = new TransMeta();
  StepMeta input = new StepMeta( "Input", stepMetaInterface );
  trans.addStep( input );
  StepMeta step1 = new StepMeta( "Step1", stepMetaInterface );
  trans.addStep( step1 );
  StepMeta step2 = new StepMeta( "Step2", stepMetaInterface );
  trans.addStep( step2 );
  StepMeta step3 = new StepMeta( "Step3", stepMetaInterface );
  trans.addStep( step3 );

  TransHopMeta hop1 = new TransHopMeta( input, step1, false );
  TransHopMeta hop2 = new TransHopMeta( step1, step2, false );
  TransHopMeta hop3 = new TransHopMeta( step2, step3, false );
  trans.addTransHop( hop1 );
  trans.addTransHop( hop2 );
  trans.addTransHop( hop3 );

  Transformation transformation = TransMetaConverter.convert( trans );
  assertThat( "Trans has steps though all of them should be removed", transformation.getOperations().size(),
      is( 0 ) );
  assertThat( "Trans has hops though all of them should be removed", transformation.getHops().size(), is( 0 ) );
}
 
Example 7
Source File: TransMetaConverterTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void transWithHops() {
  TransMeta meta = new TransMeta();
  meta.setFilename( "fileName" );
  StepMeta from = new StepMeta( "step1", stepMetaInterface );
  meta.addStep( from );
  StepMeta to = new StepMeta( "step2", stepMetaInterface );
  meta.addStep( to );
  meta.addTransHop( new TransHopMeta( from, to ) );
  Transformation trans = TransMetaConverter.convert( meta );
  assertThat( trans.getId(), is( meta.getFilename() ) );
  assertThat( trans.getOperations().size(), is( 2 ) );
  assertThat( trans.getHops().size(), is( 1 ) );
  assertThat( trans.getHops().get( 0 ).getFrom().getId(), is( from.getName() ) );
  assertThat( trans.getHops().get( 0 ).getTo().getId(), is( to.getName() ) );

  assertThat(
    trans.getHops().stream().map( Hop::getType ).collect( Collectors.toList() ),
    everyItem( is( Hop.TYPE_NORMAL ) )
  );
}
 
Example 8
Source File: RestTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testCreateMultivalueMap() {
  StepMeta stepMeta = new StepMeta();
  stepMeta.setName( "TestRest" );
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "TestRest" );
  transMeta.addStep( stepMeta );
  Rest rest = new Rest( stepMeta, mock( StepDataInterface.class ),
    1, transMeta, mock( Trans.class ) );
  MultivaluedMapImpl map = rest.createMultivalueMap( "param1", "{a:{[val1]}}" );
  String val1 = map.getFirst( "param1" );
  assertTrue( val1.contains( "%7D" ) );
}
 
Example 9
Source File: MySQLBulkLoaderTest.java    From pentaho-kettle with Apache License 2.0
@Before
public void setUp() {
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "MysqlBulkLoader" );

  Map<String, String> vars = new HashMap<String, String>();
  vars.put( "delim", "," );
  vars.put( "enclos", "'" );
  vars.put( "charset", "UTF8" );
  vars.put( "tbl", "sometable" );
  vars.put( "schema", "someschema" );
  transMeta.injectVariables( vars );
  MySQLDatabaseMeta mysql = new MySQLDatabaseMeta();
  mysql.setName( "MySQL" );
  DatabaseMeta dbMeta = new DatabaseMeta();
  dbMeta.setDatabaseInterface( mysql );
  dbMeta.setQuoteAllFields( true );
  lmeta = new MySQLBulkLoaderMeta();
  lmeta.setDelimiter( "${delim}" );
  lmeta.setEnclosure( "${enclos}" );
  lmeta.setEncoding( "${charset}" );
  lmeta.setTableName( "${tbl}" );
  lmeta.setSchemaName( "${schema}" );
  lmeta.setDatabaseMeta( dbMeta );
  ldata = new MySQLBulkLoaderData();
  PluginRegistry plugReg = PluginRegistry.getInstance();
  String mblPid = plugReg.getPluginId( StepPluginType.class, lmeta );
  smeta = new StepMeta( mblPid, "MySqlBulkLoader", lmeta );
  Trans trans = new Trans( transMeta );
  transMeta.addStep( smeta );
  lder = new MySQLBulkLoader( smeta, ldata, 1, transMeta, trans );
  lder.copyVariablesFrom( transMeta );
}
 
Example 10
Source File: WebServiceIT.java    From pentaho-kettle with Apache License 2.0
public void testProcessRow() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "WebServiceTest" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.
  String injectorPid = registry.getPluginId( StepPluginType.class, im );
  StepMeta injectorStep = new StepMeta( injectorPid, injectorStepname, im );
  transMeta.addStep( injectorStep );

  //
  // Create a dummy step 1
  //
  String dummyStepname1 = "dummy step 1";
  DummyTransMeta dm1 = new DummyTransMeta();

  String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 );
  StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 );
  transMeta.addStep( dummyStep1 );

  TransHopMeta hi = new TransHopMeta( injectorStep, dummyStep1 );
  transMeta.addTransHop( hi );

  //
  // Create a Web Service step
  //
  String webServiceStepname = "web service step";
  WebServiceMeta scm = new WebServiceMeta();

  // scm.setUrl(HTTP_LOCALHOST_9998+ "wsdl");
  // scm.setOperationName("CelciusToFahrenheit");
  DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
  DocumentBuilder db = dbf.newDocumentBuilder();
  Document doc = db.parse( new InputSource( new java.io.StringReader( STEP_META ) ) );
  scm.loadXML( doc.getFirstChild(), null, (IMetaStore) null );

  String webServicePid = registry.getPluginId( StepPluginType.class, scm );
  StepMeta webServiceStep = new StepMeta( webServicePid, webServiceStepname, scm );
  transMeta.addStep( webServiceStep );

  TransHopMeta hi2 = new TransHopMeta( dummyStep1, webServiceStep );
  transMeta.addTransHop( hi2 );

  //
  // Create a dummy step 2
  //
  String dummyStepname2 = "dummy step 2";
  DummyTransMeta dm2 = new DummyTransMeta();

  String dummyPid2 = registry.getPluginId( StepPluginType.class, dm2 );
  StepMeta dummyStep2 = new StepMeta( dummyPid2, dummyStepname2, dm2 );
  transMeta.addStep( dummyStep2 );

  TransHopMeta hi3 = new TransHopMeta( webServiceStep, dummyStep2 );
  transMeta.addTransHop( hi3 );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname1, 0 );
  RowStepCollector dummyRc1 = new RowStepCollector();
  si.addRowListener( dummyRc1 );

  si = trans.getStepInterface( webServiceStepname, 0 );
  RowStepCollector webServiceRc = new RowStepCollector();
  si.addRowListener( webServiceRc );

  RowProducer rp = trans.addRowProducer( injectorStepname, 0 );
  trans.startThreads();

  // add rows
  List<RowMetaAndData> inputList = createData( createRowMetaInterface(), new Object[][] { new Object[] { 10 } } );
  for ( RowMetaAndData rm : inputList ) {
    rp.putRow( rm.getRowMeta(), rm.getData() );
  }
  rp.finished();

  trans.waitUntilFinished();

  List<RowMetaAndData> goldRows =
    createData( createOutputRowMetaInterface(), new Object[][] { new Object[] { 10,
      new BigDecimal( 20 ) } } );
  List<RowMetaAndData> resultRows2 = webServiceRc.getRowsWritten();
  assertEquals( goldRows, resultRows2 );
}
 
Example 11
Source File: ParameterSimpleTransIT.java    From pentaho-kettle with Apache License 2.0
/**
 * Test case for parameters using a simple transformation. Here one parameter is not given a value, so its default
 * will be used.
 *
 * @throws Exception
 *           exception on any problem.
 */
public void testParameterSimpleTrans2() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "parameter_simple_trans2" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create a get variables step...
  //
  String getVariablesStepname = "get variables step";
  GetVariableMeta gvm = new GetVariableMeta();

  // Set the information of the get variables step.
  String getVariablesPid = registry.getPluginId( StepPluginType.class, gvm );
  StepMeta getVariablesStep = new StepMeta( getVariablesPid, getVariablesStepname, gvm );
  transMeta.addStep( getVariablesStep );

  //
  // Generate 1 row
  //
  String[] fieldName = { "Param1", "PARAM2" };
  String[] varName = { "${Param1}", "%%PARAM2%%" };
  int[] fieldType = { ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_STRING };
  int[] length = { -1, -1 };
  int[] precision = { -1, -1 };
  String[] format = { "", "" };
  String[] currency = { "", "" };
  String[] decimal = { "", "" };
  String[] grouping = { "", "" };
  int[] trimType = { ValueMetaInterface.TRIM_TYPE_NONE, ValueMetaInterface.TRIM_TYPE_NONE };

  FieldDefinition[] fields = new FieldDefinition[fieldName.length];
  for ( int i = 0; i < fields.length; i++ ) {
    FieldDefinition field = new FieldDefinition();
    field.setFieldName( fieldName[i] );
    field.setVariableString( varName[i] );
    field.setFieldType( fieldType[i] );
    field.setFieldLength( length[i] );
    field.setFieldPrecision( precision[i] );
    field.setFieldFormat( format[i] );
    field.setCurrency( currency[i] );
    field.setDecimal( decimal[i] );
    field.setGroup( grouping[i] );
    field.setTrimType( trimType[i] );
    fields[i] = field;
  }
  gvm.setFieldDefinitions( fields );

  //
  // Create a dummy step 1
  //
  String dummyStepname1 = "dummy step 1";
  DummyTransMeta dm1 = new DummyTransMeta();

  String dummyPid1 = registry.getPluginId( StepPluginType.class, dm1 );
  StepMeta dummyStep1 = new StepMeta( dummyPid1, dummyStepname1, dm1 );
  transMeta.addStep( dummyStep1 );

  TransHopMeta hi1 = new TransHopMeta( getVariablesStep, dummyStep1 );
  transMeta.addTransHop( hi1 );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );
  trans.addParameterDefinition( "Param1", "default1", "Parameter 1" );
  trans.addParameterDefinition( "PARAM2", "default2", "Parameter 2" );
  trans.setParameterValue( "Param1", "ParamValue1" );
  // PARAM2 is not set

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname1, 0 );
  RowStepCollector endRc = new RowStepCollector();
  si.addRowListener( endRc );

  trans.startThreads();

  trans.waitUntilFinished();

  // Now check whether the output is still as we expect.
  List<RowMetaAndData> goldenImageRows = createResultData2();
  List<RowMetaAndData> resultRows1 = endRc.getRowsWritten();
  checkRows( resultRows1, goldenImageRows );
}
 
Example 12
Source File: UniqueRowsIT.java    From pentaho-kettle with Apache License 2.0
public void testSortCaseInsensitiveUniqueCaseInsensitive() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "uniquerowstest" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.
  String injectorPid = registry.getPluginId( StepPluginType.class, im );
  StepMeta injectorStep = new StepMeta( injectorPid, injectorStepname, im );
  transMeta.addStep( injectorStep );

  //
  // Create a sort rows step
  //
  String sortRowsStepname = "sort rows step";
  SortRowsMeta srm = new SortRowsMeta();
  srm.setFieldName( new String[] { "KEY" } );
  srm.setAscending( new boolean[] { true } );
  srm.setCaseSensitive( new boolean[] { false } );
  srm.setPreSortedField( new boolean[] { false } );
  srm.setPrefix( "SortRowsTest" );
  srm.setDirectory( "." );

  String sortRowsStepPid = registry.getPluginId( StepPluginType.class, srm );
  StepMeta sortRowsStep = new StepMeta( sortRowsStepPid, sortRowsStepname, srm );
  transMeta.addStep( sortRowsStep );

  transMeta.addTransHop( new TransHopMeta( injectorStep, sortRowsStep ) );

  //
  // Create a unique rows step
  //
  String uniqueRowsStepname = "unique rows step";
  UniqueRowsMeta urm = new UniqueRowsMeta();
  urm.setCompareFields( new String[] { "KEY" } );
  urm.setCaseInsensitive( new boolean[] { true } );

  String uniqueRowsStepPid = registry.getPluginId( StepPluginType.class, urm );
  StepMeta uniqueRowsStep = new StepMeta( uniqueRowsStepPid, uniqueRowsStepname, urm );
  transMeta.addStep( uniqueRowsStep );

  transMeta.addTransHop( new TransHopMeta( sortRowsStep, uniqueRowsStep ) );

  //
  // Create a dummy step
  //
  String dummyStepname = "dummy step";
  DummyTransMeta dm = new DummyTransMeta();

  String dummyPid = registry.getPluginId( StepPluginType.class, dm );
  StepMeta dummyStep = new StepMeta( dummyPid, dummyStepname, dm );
  transMeta.addStep( dummyStep );

  transMeta.addTransHop( new TransHopMeta( uniqueRowsStep, dummyStep ) );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname, 0 );
  RowStepCollector dummyRc = new RowStepCollector();
  si.addRowListener( dummyRc );

  RowProducer rp = trans.addRowProducer( injectorStepname, 0 );
  trans.startThreads();

  // add rows
  List<RowMetaAndData> inputList = createData();
  for ( RowMetaAndData rm : inputList ) {
    rp.putRow( rm.getRowMeta(), rm.getData() );
  }
  rp.finished();

  trans.waitUntilFinished();

  List<RowMetaAndData> resultRows = dummyRc.getRowsWritten();
  checkRows( createResultDataSortCaseInsensitiveUniqueCaseInsensitive(), resultRows );
}
 
Example 13
Source File: MappingIT.java    From pentaho-kettle with Apache License 2.0
/**
 * Tests that info steps are correctly identified via StepMetaInterface.getStepIOMeta()
 */
public void testInfoStreams_single() throws Exception {
  KettleEnvironment.init();
  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // Create a new transformation with a row generator that feeds a Mapping (Sub-Transformation) Step
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "Mapping Info Test" );
  StepMeta rowGenerator = buildRowGeneratorStep( registry, "Generate Rows" );
  transMeta.addStep( rowGenerator );

  String mappingName = "mapping";
  MappingMeta mappingMeta = new MappingMeta();
  mappingMeta.setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
  mappingMeta.setFileName( "test/org/pentaho/di/trans/steps/mapping/subtrans.ktr" );
  String mappingInputStepName = "input";
  mappingMeta.setInputMappings( Collections.singletonList( createMappingDef(
    rowGenerator.getName(), mappingInputStepName, "string", "a" ) ) );
  String mappingPid = registry.getPluginId( StepPluginType.class, mappingMeta );
  StepMeta mapping = new StepMeta( mappingPid, mappingName, mappingMeta );
  transMeta.addStep( mapping );

  TransHopMeta hopGeneratorToMapping = new TransHopMeta( rowGenerator, mapping );
  transMeta.addTransHop( hopGeneratorToMapping );

  Trans trans = new Trans( transMeta );
  trans.prepareExecution( null );

  // Mimic how a transformation is loaded and initialized from TransMeta.loadXML() or
  // KettleDatabaseRepositoryTransDelegate.loadTransformation()
  // so the StepMeta references are wired up in the MappingMeta properly
  // (Copied from TransMeta.loadXML())
  for ( int i = 0; i < transMeta.nrSteps(); i++ ) {
    StepMeta stepMeta = transMeta.getStep( i );
    StepMetaInterface sii = stepMeta.getStepMetaInterface();
    if ( sii != null ) {
      sii.searchInfoAndTargetSteps( transMeta.getSteps() );
    }
  }

  // Verify the transformation was configured properly
  assertEquals( "Transformation not initialized properly", 2, transMeta.nrSteps() );

  StepMeta meta = transMeta.getStep( 1 );
  assertTrue( "Transformation not initialized properly", meta.getStepMetaInterface() instanceof MappingMeta );

  MappingMeta loadedMappingMeta = (MappingMeta) meta.getStepMetaInterface();
  assertEquals( "Expected a single input mapping definition", 1, loadedMappingMeta.getInputMappings().size() );

  StepIOMetaInterface ioMeta = loadedMappingMeta.getStepIOMeta();
  assertEquals( "Expected a single Info Stream", 1, ioMeta.getInfoStreams().size() );
  assertEquals( "Expected a single Info Step", 1, loadedMappingMeta.getInfoSteps().length );

  // Verify the transformation can be executed
  StepInterface si = trans.getStepInterface( mappingName, 0 );
  RowStepCollector rc = new RowStepCollector();
  si.addRowListener( rc );

  trans.startThreads();
  trans.waitUntilFinished();

  assertEquals( 1, rc.getRowsRead().size() );
  assertEquals( 1, rc.getRowsWritten().size() );
}
 
Example 14
Source File: UniqueRowsIT.java    From pentaho-kettle with Apache License 2.0
public void testSortCaseSensitiveUniqueCaseSensitive() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "uniquerowstest" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.
  String injectorPid = registry.getPluginId( StepPluginType.class, im );
  StepMeta injectorStep = new StepMeta( injectorPid, injectorStepname, im );
  transMeta.addStep( injectorStep );

  //
  // Create a sort rows step
  //
  String sortRowsStepname = "sort rows step";
  SortRowsMeta srm = new SortRowsMeta();
  srm.setFieldName( new String[] { "KEY" } );
  srm.setAscending( new boolean[] { true } );
  srm.setCaseSensitive( new boolean[] { true } );
  srm.setPreSortedField( new boolean[] { false } );
  srm.setPrefix( "SortRowsTest" );
  srm.setDirectory( "." );

  String sortRowsStepPid = registry.getPluginId( StepPluginType.class, srm );
  StepMeta sortRowsStep = new StepMeta( sortRowsStepPid, sortRowsStepname, srm );
  transMeta.addStep( sortRowsStep );

  transMeta.addTransHop( new TransHopMeta( injectorStep, sortRowsStep ) );

  //
  // Create a unique rows step
  //
  String uniqueRowsStepname = "unique rows step";
  UniqueRowsMeta urm = new UniqueRowsMeta();
  urm.setCompareFields( new String[] { "KEY" } );
  urm.setCaseInsensitive( new boolean[] { false } );

  String uniqueRowsStepPid = registry.getPluginId( StepPluginType.class, urm );
  StepMeta uniqueRowsStep = new StepMeta( uniqueRowsStepPid, uniqueRowsStepname, urm );
  transMeta.addStep( uniqueRowsStep );

  transMeta.addTransHop( new TransHopMeta( sortRowsStep, uniqueRowsStep ) );

  //
  // Create a dummy step
  //
  String dummyStepname = "dummy step";
  DummyTransMeta dm = new DummyTransMeta();

  String dummyPid = registry.getPluginId( StepPluginType.class, dm );
  StepMeta dummyStep = new StepMeta( dummyPid, dummyStepname, dm );
  transMeta.addStep( dummyStep );

  transMeta.addTransHop( new TransHopMeta( uniqueRowsStep, dummyStep ) );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname, 0 );
  RowStepCollector dummyRc = new RowStepCollector();
  si.addRowListener( dummyRc );

  RowProducer rp = trans.addRowProducer( injectorStepname, 0 );
  trans.startThreads();

  // add rows
  List<RowMetaAndData> inputList = createData();
  for ( RowMetaAndData rm : inputList ) {
    rp.putRow( rm.getRowMeta(), rm.getData() );
  }
  rp.finished();

  trans.waitUntilFinished();

  List<RowMetaAndData> resultRows = dummyRc.getRowsWritten();
  checkRows( createResultDataSortCaseSensitiveUniqueCaseSensitive(), resultRows );
}
 
Example 15
Source File: TextFileOutputIT.java    From pentaho-kettle with Apache License 2.0
/**
 * Tests the normal output capability of the TextFileOutput step
 *
 * @throws Exception
 */
@Test
public void testTextFileOutput6() throws Exception {
  KettleEnvironment.init();

  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "testTextFileOutput6" );
  PluginRegistry registry = PluginRegistry.getInstance();

  // create an injector step
  String injectorStepName = "injector step";
  StepMeta injectorStep = TestUtilities.createInjectorStep( injectorStepName, registry );
  transMeta.addStep( injectorStep );

  // create a row generator step
  StepMeta rowGeneratorStep = createRowGeneratorStep( "Create rows for testTextFileOutput5", registry );
  transMeta.addStep( rowGeneratorStep );

  // create a TransHopMeta for injector and add it to the transMeta
  TransHopMeta hop_injector_rowGenerator = new TransHopMeta( injectorStep, rowGeneratorStep );
  transMeta.addTransHop( hop_injector_rowGenerator );

  // create the text file output step with no compression
  // but first lets get a filename
  String textFileName = "testTextFileOutput6";
  String textFileOutputStepName = "text file output step";
  StepMeta textFileOutputStep =
    createTextFileOutputStep( textFileOutputStepName, textFileName, "None", registry );
  transMeta.addStep( textFileOutputStep );

  // create a TransHopMeta for textFileOutputStep and add it to the transMeta
  TransHopMeta hop_RowGenerator_outputTextFile = new TransHopMeta( rowGeneratorStep, textFileOutputStep );
  transMeta.addTransHop( hop_RowGenerator_outputTextFile );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );
  trans.prepareExecution( null );

  // Create a row collector and add it to the dummy step interface
  StepInterface dummyStepInterface = trans.getStepInterface( textFileOutputStepName, 0 );
  RowStepCollector dummyRowCollector = new RowStepCollector();
  dummyStepInterface.addRowListener( dummyRowCollector );

  trans.startThreads();
  trans.waitUntilFinished();

  // Compare the results
  List<RowMetaAndData> resultRows = dummyRowCollector.getRowsWritten();
  Object[][] rows = new Object[10][3];
  File f = new File( textFileName + "." + EXTENSION );
  f.deleteOnExit();
  try {
    FileInputStream fin = new FileInputStream( f );
    InputStreamReader xover = new InputStreamReader( fin );
    BufferedReader input = new BufferedReader( xover );

    readData1Rows( rows, input );

    fin.close();

  } catch ( IOException e ) {
    fail( e.getLocalizedMessage() );
  }

  List<RowMetaAndData> outFileRows = createResultDataFromObjects( rows );

  try {
    TestUtilities.checkRows( resultRows, outFileRows );
  } catch ( TestFailedException tfe ) {
    fail( tfe.getMessage() );
  }
}
 
Example 16
Source File: ExecSQLRowIT.java    From pentaho-kettle with Apache License 2.0
/**
 * Basic Test case for Exec SQL Row. This tests a commit size of one (i.e. "simulated" autocommit)
 */
@Test
public void testExecSQLRow2() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "transname" );

  // Add the database connections
  for ( int i = 0; i < databasesXML.length; i++ ) {
    DatabaseMeta databaseMeta = new DatabaseMeta( databasesXML[i] );
    transMeta.addDatabase( databaseMeta );
  }

  DatabaseMeta dbInfo = transMeta.findDatabase( "db" );
  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.

  String injectorPid = registry.getPluginId( StepPluginType.class, im );
  StepMeta injectorStep = new StepMeta( injectorPid, injectorStepname, im );
  transMeta.addStep( injectorStep );

  //
  // create the Exec SQL Row step...
  //
  String stepName = "delete from [" + execsqlrow_testtable + "]";
  ExecSQLRowMeta execsqlmeta = new ExecSQLRowMeta();
  execsqlmeta.setDatabaseMeta( transMeta.findDatabase( "db" ) );
  execsqlmeta.setCommitSize( 1 );
  execsqlmeta.setSqlFieldName( "SQL" );

  String execSqlRowId = registry.getPluginId( StepPluginType.class, execsqlmeta );
  StepMeta execSqlRowStep = new StepMeta( execSqlRowId, stepName, execsqlmeta );
  execSqlRowStep.setDescription( "Deletes information from table ["
    + execsqlrow_testtable + "] on database [" + dbInfo + "]" );
  transMeta.addStep( execSqlRowStep );

  TransHopMeta hi = new TransHopMeta( injectorStep, execSqlRowStep );
  transMeta.addTransHop( hi );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( stepName, 0 );
  RowStepCollector rc = new RowStepCollector();
  si.addRowListener( rc );

  RowProducer rp = trans.addRowProducer( injectorStepname, 0 );
  trans.startThreads();

  // add rows
  List<RowMetaAndData> inputList = createDataRows();
  for ( RowMetaAndData rm : inputList ) {
    rp.putRow( rm.getRowMeta(), rm.getData() );
  }
  rp.finished();

  trans.waitUntilFinished();

  List<RowMetaAndData> resultRows = rc.getRowsWritten();
  List<RowMetaAndData> goldRows = createResultDataRows();
  checkRows( goldRows, resultRows );
}
 
Example 17
Source File: InjectorIT.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Test case for injector step... also a show case on how to use injector.
 */
public void testInjector() throws Exception {
  KettleEnvironment.init();

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "injectortest" );

  PluginRegistry registry = PluginRegistry.getInstance();

  //
  // create an injector step...
  //
  String injectorStepname = "injector step";
  InjectorMeta im = new InjectorMeta();

  // Set the information of the injector.

  String injectorPid = registry.getPluginId( StepPluginType.class, im );
  StepMeta injectorStep = new StepMeta( injectorPid, injectorStepname, im );
  transMeta.addStep( injectorStep );

  //
  // Create a dummy step
  //
  String dummyStepname = "dummy step";
  DummyTransMeta dm = new DummyTransMeta();

  String dummyPid = registry.getPluginId( StepPluginType.class, dm );
  StepMeta dummyStep = new StepMeta( dummyPid, dummyStepname, dm );
  transMeta.addStep( dummyStep );

  TransHopMeta hi = new TransHopMeta( injectorStep, dummyStep );
  transMeta.addTransHop( hi );

  // Now execute the transformation...
  Trans trans = new Trans( transMeta );

  trans.prepareExecution( null );

  StepInterface si = trans.getStepInterface( dummyStepname, 0 );
  RowStepCollector rc = new RowStepCollector();
  si.addRowListener( rc );

  RowProducer rp = trans.addRowProducer( injectorStepname, 0 );
  trans.startThreads();

  // add rows
  List<RowMetaAndData> inputList = createData();
  for ( RowMetaAndData rm : inputList ) {
    rp.putRow( rm.getRowMeta(), rm.getData() );
  }
  rp.finished();

  trans.waitUntilFinished();

  List<RowMetaAndData> resultRows = rc.getRowsWritten();
  checkRows( resultRows, inputList );
}
 
Example 18
Source File: RepositoryUnitIT.java    From pentaho-kettle with Apache License 2.0
/**
 * This test is to ensure that the metadata for the GetXMLData step is preserved when saving to a
 * repository. The test creates a GetXMLData step and saves it to the repository. Then the local
 * data is changed and the step is read back in from the repository. It is then asserted that the
 * field value(s) are equal to what was saved.
 *
 * Test method for
 * {@link org.pentaho.di.trans.steps.getxmldata.GetXMLDataMeta#readRep(org.pentaho.di.repository.Repository,
 * org.pentaho.di.repository.ObjectId, java.util.List, java.util.Map)}
 * . Test method for
 * {@link org.pentaho.di.trans.steps.getxmldata.GetXMLDataMeta#saveRep(org.pentaho.di.repository.Repository,
 * org.pentaho.di.repository.ObjectId, org.pentaho.di.repository.ObjectId)}
 * .
 */
@Test
public void testGetXMLDataMetaSaveAndReadRep() {

  //
  // Create a new transformation...
  //
  TransMeta transMeta = new TransMeta();
  transMeta.setName( "getxmldata1" );

  //
  // Create a Get XML Data step
  //
  String getXMLDataName = "get xml data step";
  GetXMLDataMeta gxdm = new GetXMLDataMeta();

  String getXMLDataPid = registry.getPluginId( StepPluginType.class, gxdm );
  StepMeta getXMLDataStep = new StepMeta( getXMLDataPid, getXMLDataName, gxdm );
  transMeta.addStep( getXMLDataStep );

  GetXMLDataField[] fields = new GetXMLDataField[1];

  for ( int idx = 0; idx < fields.length; idx++ ) {
    fields[idx] = new GetXMLDataField();
  }

  fields[0].setName( "objectid" );
  fields[0].setXPath( "ObjectID" );
  fields[0].setElementType( GetXMLDataField.ELEMENT_TYPE_NODE );
  fields[0].setResultType( GetXMLDataField.RESULT_TYPE_TYPE_SINGLE_NODE );
  fields[0].setType( ValueMetaInterface.TYPE_STRING );
  fields[0].setFormat( "" );
  fields[0].setLength( -1 );
  fields[0].setPrecision( -1 );
  fields[0].setCurrencySymbol( "" );
  fields[0].setDecimalSymbol( "" );
  fields[0].setGroupSymbol( "" );
  fields[0].setTrimType( GetXMLDataField.TYPE_TRIM_NONE );

  gxdm.setDefault();
  gxdm.setEncoding( "UTF-8" );
  gxdm.setIsAFile( false );
  gxdm.setInFields( true );
  gxdm.setLoopXPath( "/" );
  gxdm.setXMLField( "field1" );
  gxdm.setInputFields( fields );

  try {
    // Now save the transformation and then read it back in
    transMeta.setRepository( repository );
    RepositoryDirectoryInterface repositoryDirectory = repository.findDirectory( "/" );
    transMeta.setRepositoryDirectory( repositoryDirectory );
    repository.transDelegate.saveTransformation( transMeta, "None", null, true );

    // Create a new placeholder meta and set the result type to something different than what was
    // saved, to ensure the saveRep code is working correctly.
    GetXMLDataMeta newMeta = (GetXMLDataMeta) gxdm.clone();
    for ( GetXMLDataField f : newMeta.getInputFields() ) {
      f.setResultType( GetXMLDataField.RESULT_TYPE_VALUE_OF );
    }
    newMeta.readRep( repository, new MemoryMetaStore(), getXMLDataStep.getObjectId(), repository.getDatabases() );

    // Check that the value of Result Type is what was saved in the repo
    assertEquals( newMeta.getInputFields()[0].getResultTypeCode(), "singlenode" );

  } catch ( KettleException e ) {
    fail( "Test failed due to exception: " + e.getLocalizedMessage() );
  }
}
 
Example 19
Source File: MongoDbDatasourceDialogTest.java    From pentaho-mongodb-plugin with Apache License 2.0
public static void main( String[] args ) {

  KettleLogStore.init();

  MongoDbInputMeta meta = new MongoDbInputMeta();
  meta.setJsonQuery( "{CITY:\"NYC\"}" );
  meta.setAuthenticationPassword( "password" );
  meta.setAuthenticationUser( "gmoran" );
  meta.setCollection( "big" );
  meta.setDbName( "data" );
  meta.setHostnames( "" );
  meta.setPort( "27017" );
  meta.setReadPreference( "Secondary preferred" );
  meta.setFieldsName( "{id:true}" );
  meta.setQueryIsPipeline( true );

  TransMeta trans = new TransMeta();
  StepMeta stepMeta = new StepMeta( "mongo_source", meta );
  trans.addStep( stepMeta );

  MongoDbInputXulDialog dlg = new MongoDbInputXulDialog( null, meta, trans, "mongo_source" );

  if ( dlg.open() != null ) {
    System.out.println( "Host name(s): ".concat( meta.getHostnames() != null ? meta.getHostnames() : "" ) );
    System.out.println( "Port: ".concat( meta.getPort() != null ? meta.getPort() : "" ) );
    System.out.println( "Database: ".concat( meta.getDbName() != null ? meta.getDbName() : "" ) );
    System.out.println( "Collection: ".concat( meta.getCollection() != null ? meta.getCollection() : "" ) );
    System.out.println( "User: ".concat( meta.getAuthenticationUser() != null ? meta.getAuthenticationUser() : "" ) );
    System.out.println(
      "Password: ".concat( meta.getAuthenticationPassword() != null ? meta.getAuthenticationPassword() : "" ) );
    System.out.println(
      "Connection Timeout: ".concat( meta.getConnectTimeout() != null ? meta.getConnectTimeout() : "" ) );
    System.out.println( "Socket Timeout: ".concat( meta.getSocketTimeout() != null ? meta.getSocketTimeout() : "" ) );
    System.out.println(
      "Read Preference: ".concat( meta.getReadPreference() != null ? meta.getReadPreference() : "" ) );
    System.out.println( "JSON Query: ".concat( meta.getJsonQuery() != null ? meta.getJsonQuery() : "" ) );
    System.out.println(
      "Is Agg Pipeline:".concat( meta.getQueryIsPipeline() ? "IS pipeline" : "IS NOT a pipeline" ) );
    System.out.println( "Field Expression: ".concat( meta.getFieldsName() != null ? meta.getFieldsName() : "" ) );
  }
}
 
Example 20
Source File: PurRepositoryIT.java    From pentaho-kettle with Apache License 2.0
@Test
public void testExport() throws Exception {
  final String exportFileName = new File( "test.export" ).getAbsolutePath(); //$NON-NLS-1$

  RepositoryDirectoryInterface rootDir = initRepo();
  String uniqueTransName = EXP_TRANS_NAME.concat( EXP_DBMETA_NAME );
  TransMeta transMeta = createTransMeta( EXP_DBMETA_NAME );

  // Create a database association
  DatabaseMeta dbMeta = createDatabaseMeta( EXP_DBMETA_NAME );
  repository.save( dbMeta, VERSION_COMMENT_V1, null );

  TableInputMeta tableInputMeta = new TableInputMeta();
  tableInputMeta.setDatabaseMeta( dbMeta );

  transMeta.addStep( new StepMeta( EXP_TRANS_STEP_1_NAME, tableInputMeta ) );

  RepositoryDirectoryInterface transDir = rootDir.findDirectory( DIR_TRANSFORMATIONS );
  repository.save( transMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( transMeta ); // So this transformation is cleaned up afterward
  assertNotNull( transMeta.getObjectId() );
  ObjectRevision version = transMeta.getObjectRevision();
  assertNotNull( version );
  assertTrue( hasVersionWithComment( transMeta, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( uniqueTransName, transDir, RepositoryObjectType.TRANSFORMATION ) );

  JobMeta jobMeta = createJobMeta( EXP_JOB_NAME );
  RepositoryDirectoryInterface jobsDir = rootDir.findDirectory( DIR_JOBS );
  repository.save( jobMeta, VERSION_COMMENT_V1, null );
  deleteStack.push( jobMeta );
  assertNotNull( jobMeta.getObjectId() );
  version = jobMeta.getObjectRevision();
  assertNotNull( version );
  assertTrue( hasVersionWithComment( jobMeta, VERSION_COMMENT_V1 ) );
  assertTrue( repository.exists( EXP_JOB_NAME, jobsDir, RepositoryObjectType.JOB ) );

  LogListener errorLogListener = new LogListener( LogLevel.ERROR );
  KettleLogStore.getAppender().addLoggingEventListener( errorLogListener );

  try {
    repository.getExporter().exportAllObjects( new MockProgressMonitorListener(), exportFileName, null, "all" ); //$NON-NLS-1$
    FileObject exportFile = KettleVFS.getFileObject( exportFileName );
    assertFalse( "file left open", exportFile.getContent().isOpen() );
    assertNotNull( exportFile );
    MockRepositoryExportParser parser = new MockRepositoryExportParser();
    SAXParserFactory.newInstance().newSAXParser().parse( KettleVFS.getInputStream( exportFile ), parser );
    if ( parser.getFatalError() != null ) {
      throw parser.getFatalError();
    }
    assertNotNull( "No nodes found in export", parser.getNodeNames() ); //$NON-NLS-1$
    assertTrue( "No nodes found in export", !parser.getNodeNames().isEmpty() ); //$NON-NLS-1$
    assertEquals( "Incorrect number of nodes", 5, parser.getNodeNames().size() ); //$NON-NLS-1$
    assertEquals( "Incorrect number of transformations", 1, parser.getNodesWithName( "transformation" ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
    assertEquals( "Incorrect number of jobs", 1, parser.getNodesWithName( "job" ).size() ); //$NON-NLS-1$ //$NON-NLS-2$
    assertTrue( "log error", errorLogListener.getEvents().isEmpty() );

  } finally {
    KettleVFS.getFileObject( exportFileName ).delete();
    KettleLogStore.getAppender().removeLoggingEventListener( errorLogListener );
  }
}