org.pentaho.di.core.variables.VariableSpace Java Examples

The following examples show how to use org.pentaho.di.core.variables.VariableSpace. They are taken from open source projects; the source file, project, and license are noted above each example.
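Before the examples, here is a minimal sketch of the interface in action, using org.pentaho.di.core.variables.Variables, the stock VariableSpace implementation that appears throughout the examples below (the variable name and values are illustrative only):

import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.variables.Variables;

public class VariableSpaceSketch {
  public static void main( String[] args ) {
    VariableSpace space = new Variables();
    space.initializeVariablesFrom( null );         // pick up system/environment defaults
    space.setVariable( "OUTPUT_DIR", "/tmp/out" ); // illustrative name and value

    // environmentSubstitute() resolves ${VAR} tokens embedded in a string
    String resolved = space.environmentSubstitute( "${OUTPUT_DIR}/result.csv" );
    System.out.println( resolved );                // prints /tmp/out/result.csv

    // getVariable() reads a single variable, optionally with a default value
    String dir = space.getVariable( "OUTPUT_DIR", "/tmp" );
    System.out.println( dir );
  }
}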
Example #1
Source File: MemoryGroupByMetaGetFieldsTest.java    From pentaho-kettle with Apache License 2.0
@Before
public void setup() throws KettlePluginException {
  mockSpace = mock( VariableSpace.class );
  doReturn("N" ).when( mockSpace ).getVariable( any(), anyString() );

  rowMeta = spy( new RowMeta() );
  memoryGroupByMeta = spy( new MemoryGroupByMeta() );

  mockStatic( ValueMetaFactory.class );
  when( ValueMetaFactory.createValueMeta( anyInt() ) ).thenCallRealMethod();
  when( ValueMetaFactory.createValueMeta( anyString(), anyInt() ) ).thenCallRealMethod();
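  // Type codes below are ValueMetaInterface constants: 3 = TYPE_DATE, 5 = TYPE_INTEGER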
  when( ValueMetaFactory.createValueMeta( "maxDate", 3, -1, -1 ) ).thenReturn( new ValueMetaDate( "maxDate" ) );
  when( ValueMetaFactory.createValueMeta( "minDate", 3, -1, -1 ) ).thenReturn( new ValueMetaDate( "minDate" ) );
  when( ValueMetaFactory.createValueMeta( "countDate", 5, -1, -1 ) ).thenReturn( new ValueMetaInteger( "countDate" ) );
  when( ValueMetaFactory.getValueMetaName( 3 ) ).thenReturn( "Date" );
  when( ValueMetaFactory.getValueMetaName( 5 ) ).thenReturn( "Integer" );
}
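Note that mockStatic comes from PowerMock, so for these static stubs to take effect the test class must run with the PowerMock runner and list ValueMetaFactory in @PrepareForTest (not shown in this excerpt).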
 
Example #2
Source File: GetFilesRowsCountMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So
 * what this does is turn the name of files into absolute paths OR it simply includes the resource in the ZIP file.
 * For now, we'll simply turn it into an absolute path and pray that the file is on a shared drive or something like
 * that.
 *
 * @param space
 *          the variable space to use
 * @param definitions
 *          the map of resource definitions to which exported resources are added
 * @param resourceNamingInterface
 *          the interface used to generate valid names for exported file resources
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // The object that we're modifying here is a copy of the original!
    // So let's change the filename from relative to absolute by grabbing the file object...
    // In case the name of the file comes from previous steps, forget about this!
    //
    if ( !filefield ) {
      for ( int i = 0; i < fileName.length; i++ ) {
        FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName[i] ), space );
        fileName[i] = resourceNamingInterface.nameResource( fileObject, space, Utils.isEmpty( fileMask[i] ) );
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example #3
Source File: JsonInputField.java    From pentaho-kettle with Apache License 2.0
public ValueMetaInterface toValueMeta( String fieldOriginStepName, VariableSpace vspace ) throws KettlePluginException {
  int type = getType();
  if ( type == ValueMetaInterface.TYPE_NONE ) {
    type = ValueMetaInterface.TYPE_STRING;
  }
  ValueMetaInterface v =
      ValueMetaFactory.createValueMeta( vspace != null ? vspace.environmentSubstitute( getName() ) : getName(), type );
  v.setLength( getLength() );
  v.setPrecision( getPrecision() );
  v.setOrigin( fieldOriginStepName );
  v.setConversionMask( getFormat() );
  v.setDecimalSymbol( getDecimalSymbol() );
  v.setGroupingSymbol( getGroupSymbol() );
  v.setCurrencySymbol( getCurrencySymbol() );
  v.setTrimType( getTrimType() );
  return v;
}
 
Example #4
Source File: StepWithMappingMetaTest.java    From pentaho-kettle with Apache License 2.0
@Test
@PrepareForTest( StepWithMappingMeta.class )
public void activateParamsTest() throws Exception {
  String childParam = "childParam";
  String childValue = "childValue";
  String paramOverwrite = "paramOverwrite";
  String parentValue = "parentValue";
  String stepValue = "stepValue";

  VariableSpace parent = new Variables();
  parent.setVariable( paramOverwrite, parentValue );

  TransMeta childVariableSpace = new TransMeta();
  childVariableSpace.addParameterDefinition( childParam, "", "" );
  childVariableSpace.setParameterValue( childParam, childValue );

  String[] parameters = childVariableSpace.listParameters();
  StepWithMappingMeta.activateParams( childVariableSpace, childVariableSpace, parent,
    parameters, new String[] { childParam, paramOverwrite }, new String[] { childValue, stepValue }, true );

  Assert.assertEquals( childValue, childVariableSpace.getVariable( childParam ) );
  // the step parameter prevails
  Assert.assertEquals( stepValue, childVariableSpace.getVariable( paramOverwrite ) );
}
 
Example #5
Source File: MongoDbOutputTest.java    From pentaho-mongodb-plugin with Apache License 2.0
@Test public void testTopLevelArrayStructureWithObjects() throws Exception {
  List<MongoDbOutputMeta.MongoField> paths = asList( mf( "field1", true, "[0]" ), mf( "field2", true, "[1]" ) );

  RowMetaInterface rmi = new RowMeta();
  rmi.addValueMeta( new ValueMetaString( "field1" ) );
  rmi.addValueMeta( new ValueMetaInteger( "field2" ) );

  Object[] row = new Object[ 2 ];
  row[ 0 ] = "value1";
  row[ 1 ] = 12L;
  VariableSpace vs = new Variables();

  for ( MongoDbOutputMeta.MongoField f : paths ) {
    f.init( vs );
  }

  DBObject result = kettleRowToMongo( paths, rmi, row, MongoDbOutputData.MongoTopLevel.ARRAY, false );

  assertEquals( JSON.serialize( result ), "[ { \"field1\" : \"value1\"} , { \"field2\" : 12}]" );
}
 
Example #6
Source File: CiviMeta.java    From civicrm-data-integration with GNU General Public License v3.0
public void getFields(RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) {

    /*
     * Add the output fields; these are the ones later picked up by the
     * processRow method in the CiviInput class. Note that this class is also
     * a private attribute of CiviInput. Here we declare each field's data type.
     */
    for (String cField : civiCrmKeyList) {
        try {
            // A check was added so that fields missing from the field listing
            // returned by CiviCRM are not dropped from the step output; in that
            // case the field is automatically assumed to be a string.
            ValueMetaInterface v = new ValueMeta(outputMap.get(cField), (civiCrmListingFields.get(cField) != null) ? civiCrmListingFields.get(cField).getMetaInterfaceType() : ValueMetaInterface.TYPE_STRING);
            v.setOrigin(origin);
            r.addValueMeta(v);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
 
Example #7
Source File: PDI5436Test.java    From pentaho-kettle with Apache License 2.0
private DatabaseLookupMeta mockStepMeta() throws KettleStepException {
  DatabaseLookupMeta stepMeta = smh.initStepMetaInterface;
  doReturn( mock( DatabaseMeta.class ) ).when( stepMeta ).getDatabaseMeta();
  doReturn( new String[] { "=" } ).when( stepMeta ).getKeyCondition();

  doCallRealMethod().when( stepMeta ).getFields( any( RowMetaInterface.class ), anyString(),
      any( RowMetaInterface[].class ), any( StepMeta.class ), any( VariableSpace.class ), any( Repository.class ),
      any( IMetaStore.class ) );
  doReturn( new String[] { "value" } ).when( stepMeta ).getReturnValueNewName();
  doReturn( new int[] { ValueMetaInterface.TYPE_STRING } ).when( stepMeta ).getReturnValueDefaultType();
  doReturn( true ).when( stepMeta ).isCached();
  doReturn( true ).when( stepMeta ).isLoadingAllDataInCache();
  doReturn( new String[] { "id" } ).when( stepMeta ).getStreamKeyField1();
  doReturn( new String[] { null } ).when( stepMeta ).getStreamKeyField2();
  doReturn( new String[] { "id" } ).when( stepMeta ).getTableKeyField();
  doReturn( new String[] { "value" } ).when( stepMeta ).getReturnValueField();
  doReturn( new String[] { "" } ).when( stepMeta ).getReturnValueDefault();

  return stepMeta;
}
 
Example #8
Source File: TransUnitTest.java    From pentaho-pdi-dataset with Apache License 2.0
public String calculateCompleteFilename( VariableSpace space ) {

  String baseFilePath = space.environmentSubstitute( basePath );
  if ( StringUtils.isEmpty( baseFilePath ) ) {
    // See if the base path environment variable is set
    //
    baseFilePath = space.getVariable( DataSetConst.VARIABLE_UNIT_TESTS_BASE_PATH );
  }
  if ( StringUtils.isEmpty( baseFilePath ) ) {
    baseFilePath = "";
  }
  if ( StringUtils.isNotEmpty( baseFilePath ) ) {
    if ( !baseFilePath.endsWith( File.separator ) ) {
      baseFilePath += File.separator;
    }
  }
  return baseFilePath + transFilename;
}
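As a usage sketch (the unitTest instance and the path are assumptions for illustration), the base path consulted above can be supplied through the variable:

VariableSpace space = new Variables();
space.setVariable( DataSetConst.VARIABLE_UNIT_TESTS_BASE_PATH, "/opt/unit-tests" ); // illustrative path
// With an empty basePath, the result is "/opt/unit-tests/" + transFilename
String filename = unitTest.calculateCompleteFilename( space );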
 
Example #9
Source File: TextFileInputMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * Since the exported transformation that runs this will reside in a ZIP file, we can't reference files relatively. So
 * what this does is turn the name of files into absolute paths OR it simply includes the resource in the ZIP file.
 * For now, we'll simply turn it into an absolute path and pray that the file is on a shared drive or something like
 * that.
 *
 * @param space
 *          the variable space to use
 * @param definitions
 *          the map of resource definitions to which exported resources are added
 * @param resourceNamingInterface
 *          the interface used to generate valid names for exported file resources
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // The object that we're modifying here is a copy of the original!
    // So let's change the filename from relative to absolute by grabbing the file object...
    // In case the name of the file comes from previous steps, forget about this!
    //
    if ( !acceptingFilenames ) {

      // Replace the filename ONLY (folder or filename)
      //
      for ( int i = 0; i < fileName.length; i++ ) {
        FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName[i] ), space );
        fileName[i] = resourceNamingInterface.nameResource( fileObject, space, Utils.isEmpty( fileMask[i] ) );
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example #10
Source File: ReplaceStringMetaTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testGetFields() throws KettleStepException {
  ReplaceStringMeta meta = new ReplaceStringMeta();
  meta.setFieldInStream( new String[] { FIELD_NAME } );
  meta.setFieldOutStream( new String[] { FIELD_NAME } );

  ValueMetaInterface inputFieldMeta = mock( ValueMetaInterface.class );
  when( inputFieldMeta.getStringEncoding() ).thenReturn( ENCODING_NAME );

  RowMetaInterface inputRowMeta = mock( RowMetaInterface.class );
  when( inputRowMeta.searchValueMeta( anyString() ) ).thenReturn( inputFieldMeta );

  StepMeta nextStep = mock( StepMeta.class );
  VariableSpace space = mock( VariableSpace.class );
  Repository repository = mock( Repository.class );
  IMetaStore metaStore = mock( IMetaStore.class );
  meta.getFields( inputRowMeta, "test", null, nextStep, space, repository, metaStore );

  ArgumentCaptor<ValueMetaInterface> argument = ArgumentCaptor.forClass( ValueMetaInterface.class );
  verify( inputRowMeta ).addValueMeta( argument.capture() );
  assertEquals( ENCODING_NAME, argument.getValue().getStringEncoding() );
}
 
Example #11
Source File: AbstractMetaTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testInitializeShareInjectVariables() {
  meta.initializeVariablesFrom( null );
  VariableSpace parent = mock( VariableSpace.class );
  when( parent.getVariable( "var1" ) ).thenReturn( "x" );
  when( parent.listVariables() ).thenReturn( new String[]{ "var1" } );
  meta.initializeVariablesFrom( parent );
  assertEquals( "x", meta.getVariable( "var1" ) );
  assertNotNull( meta.listVariables() );
  VariableSpace newVars = mock( VariableSpace.class );
  when( newVars.getVariable( "var2" ) ).thenReturn( "y" );
  when( newVars.listVariables() ).thenReturn( new String[]{ "var2" } );
  meta.shareVariablesWith( newVars );
  assertEquals( "y", meta.getVariable( "var2" ) );
  Map<String, String> props = new HashMap<>();
  props.put( "var3", "a" );
  props.put( "var4", "b" );
  meta.shareVariablesWith( new Variables() );
  meta.injectVariables( props );
  // Need to "Activate" the injection, we can initialize from null
  meta.initializeVariablesFrom( null );
  assertEquals( "a", meta.getVariable( "var3" ) );
  assertEquals( "b", meta.getVariable( "var4" ) );
}
 
Example #12
Source File: SasInputMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  for ( SasInputField field : outputFields ) {
    try {
      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( field.getRename(), field.getType() );
      valueMeta.setLength( field.getLength(), field.getPrecision() );
      valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
      valueMeta.setGroupingSymbol( field.getGroupingSymbol() );
      valueMeta.setConversionMask( field.getConversionMask() );
      valueMeta.setTrimType( field.getTrimType() );
      valueMeta.setOrigin( name );

      row.addValueMeta( valueMeta );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
}
 
Example #13
Source File: AvroNestedReader.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * Reset this field. Should be called prior to processing a new field value from the avro file
 *
 * @param space environment variables (values that environment variables resolve to cannot contain "."s)
 */
public void reset( VariableSpace space ) {
  m_tempParts.clear();

  for ( String part : m_pathParts ) {
    if ( space == null ) {
      m_tempParts.add( part );
    } else {
      m_tempParts.add( space.environmentSubstitute( part ) );
    }
  }

  // reset sub fields
  for ( AvroInputField f : m_subFields ) {
    resetField( f, space );
  }
}
 
Example #14
Source File: ValueDataUtil.java    From pentaho-kettle with Apache License 2.0
public static Object percent2( ValueMetaInterface metaA, Object dataA, ValueMetaInterface metaB, Object dataB, VariableSpace space ) throws KettleValueException {
  if ( dataA == null || dataB == null ) {
    return null;
  }

  switch ( metaA.getType() ) {
    case ValueMetaInterface.TYPE_NUMBER:
      return new Double( metaA.getNumber( dataA ).doubleValue()
        - divideDoubles( multiplyDoubles( metaA.getNumber( dataA ), metaB.getNumber( dataB ) ), 100.0D ) );
    case ValueMetaInterface.TYPE_INTEGER:
      return new Long( metaA.getInteger( dataA ).longValue()
        - divideLongs( multiplyLongs( metaA.getInteger( dataA ), metaB.getInteger( dataB ) ), 100L ) );
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return metaA.getBigNumber( dataA ).subtract(
        divideBigDecimals( multiplyBigDecimals(
          metaB.getBigNumber( dataB ), metaA.getBigNumber( dataA ), null ), new BigDecimal( 100 ), space ) );
    default:
      throw new KettleValueException( "The 'A-B%' function only works on numeric data" );
  }
}
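For example, for integer inputs dataA = 200 and dataB = 10, the 'A-B%' calculation returns 200 - (200 * 10) / 100 = 180.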
 
Example #15
Source File: StepWithMappingMetaTest.java    From pentaho-kettle with Apache License 2.0
@Test
@PrepareForTest( StepWithMappingMeta.class )
public void replaceVariablesWithJobInternalVariablesTest() {
  String variableOverwrite = "paramOverwrite";
  String variableChildOnly = "childValueVariable";
  String[] jobVariables = Const.INTERNAL_JOB_VARIABLES;
  VariableSpace childVariables = new Variables();
  VariableSpace replaceByParentVariables = new Variables();

  for ( String internalVariable : jobVariables ) {
    childVariables.setVariable( internalVariable, "childValue" );
    replaceByParentVariables.setVariable( internalVariable, "parentValue" );
  }

  childVariables.setVariable( variableChildOnly, "childValueVariable" );
  childVariables.setVariable( variableOverwrite, "childNotInternalValue" );
  replaceByParentVariables.setVariable( variableOverwrite, "parentNotInternalValue" );

  StepWithMappingMeta.replaceVariableValues( childVariables, replaceByParentVariables );
  // do not replace internal variables
  Assert.assertEquals( "childValue", childVariables.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) );
  // replace non-internal variables
  Assert.assertEquals( "parentNotInternalValue", childVariables.getVariable( variableOverwrite ) );
  // keep child-only variables
  Assert.assertEquals( variableChildOnly, childVariables.getVariable( variableChildOnly ) );
}
 
Example #16
Source File: MongoDbOutputTest.java    From pentaho-mongodb-plugin with Apache License 2.0
@Test public void testTopLevelObjectStructureTwoLevelNested() throws Exception {
  List<MongoDbOutputMeta.MongoField> paths = new ArrayList<MongoDbOutputMeta.MongoField>( 2 );

  MongoDbOutputMeta.MongoField mf = new MongoDbOutputMeta.MongoField();
  mf.m_incomingFieldName = "field1";
  mf.m_mongoDocPath = "nestedDoc.secondNested";
  mf.m_useIncomingFieldNameAsMongoFieldName = true;
  paths.add( mf );

  mf = new MongoDbOutputMeta.MongoField();
  mf.m_incomingFieldName = "field2";
  mf.m_mongoDocPath = "nestedDoc";
  mf.m_useIncomingFieldNameAsMongoFieldName = true;
  paths.add( mf );

  RowMetaInterface rmi = new RowMeta();
  rmi.addValueMeta( new ValueMetaString( "field1" ) );
  rmi.addValueMeta( new ValueMetaInteger( "field2" ) );

  Object[] row = new Object[ 2 ];
  row[ 0 ] = "value1";
  row[ 1 ] = 12L;
  VariableSpace vs = new Variables();

  for ( MongoDbOutputMeta.MongoField f : paths ) {
    f.init( vs );
  }

  DBObject result = kettleRowToMongo( paths, rmi, row, MongoDbOutputData.MongoTopLevel.RECORD, false );

  assertEquals( JSON.serialize( result ),
    "{ \"nestedDoc\" : { \"secondNested\" : { \"field1\" : \"value1\"} , \"field2\" : 12}}" );
}
 
Example #17
Source File: SynchronizeAfterMergeMeta.java    From pentaho-kettle with Apache License 2.0
public RowMetaInterface getRequiredFields( VariableSpace space ) throws KettleException {
  String realTableName = space.environmentSubstitute( tableName );
  String realSchemaName = space.environmentSubstitute( schemaName );

  if ( databaseMeta != null ) {
    Database db = new Database( loggingObject, databaseMeta );
    try {
      db.connect();

      if ( !Utils.isEmpty( realTableName ) ) {
        // Check if this table exists...
        if ( db.checkTableExists( realSchemaName, realTableName ) ) {
          return db.getTableFieldsMeta( realSchemaName, realTableName );
        } else {
          throw new KettleException( BaseMessages.getString(
            PKG, "SynchronizeAfterMergeMeta.Exception.TableNotFound" ) );
        }
      } else {
        throw new KettleException( BaseMessages.getString(
          PKG, "SynchronizeAfterMergeMeta.Exception.TableNotSpecified" ) );
      }
    } catch ( Exception e ) {
      throw new KettleException( BaseMessages.getString(
        PKG, "SynchronizeAfterMergeMeta.Exception.ErrorGettingFields" ), e );
    } finally {
      db.disconnect();
    }
  } else {
    throw new KettleException( BaseMessages.getString(
      PKG, "SynchronizeAfterMergeMeta.Exception.ConnectionNotDefined" ) );
  }

}
 
Example #18
Source File: MapReduceJobBuilderImpl.java    From pentaho-hadoop-shims with Apache License 2.0
public MapReduceJobBuilderImpl( NamedCluster namedCluster, HadoopShim hadoopShim, LogChannelInterface log,
                                VariableSpace variableSpace ) {
  this.namedCluster = namedCluster;
  this.hadoopShim = hadoopShim;
  this.log = log;
  this.variableSpace = variableSpace;
  this.userDefined = new HashMap<>();
}
 
Example #19
Source File: WebServiceMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Input rows and output rows are different in the webservice step
  //
  if ( !isPassingInputData() ) {
    r.clear();
  }

  // Add the output fields...
  //
  for ( WebServiceField field : getFieldsOut() ) {
    int valueType = field.getType();

    // If the type is unrecognized we give back the XML as a String...
    //
    if ( field.getType() == ValueMetaInterface.TYPE_NONE ) {
      valueType = ValueMetaInterface.TYPE_STRING;
    }

    try {
      ValueMetaInterface vValue = ValueMetaFactory.createValueMeta( field.getName(), valueType );
      vValue.setOrigin( name );
      r.addValueMeta( vValue );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
}
 
Example #20
Source File: LdapProtocolFactoryIT.java    From pentaho-kettle with Apache License 2.0
@Test
public void testLdapProtocolFactoryReturnsLdapSslProtocolForName() throws KettleException {
  when( mockLdapMeta.getProtocol() ).thenReturn( LdapSslProtocol.getName() );
  LdapProtocol protocol =
    new LdapProtocolFactory( null ).createLdapProtocol( mock( VariableSpace.class ), mockLdapMeta, null );
  assertTrue( protocol.getClass().equals( LdapSslProtocol.class ) );
}
 
Example #21
Source File: JmsConsumerMeta.java    From pentaho-kettle with Apache License 2.0
@Override public RowMeta getRowMeta( String s, VariableSpace variableSpace ) {
  RowMeta rowMeta = new RowMeta();
  rowMeta.addValueMeta( new ValueMetaString( messageField ) );
  rowMeta.addValueMeta( new ValueMetaString( destinationField ) );
  rowMeta.addValueMeta( new ValueMetaString( messageId ) );
  rowMeta.addValueMeta( new ValueMetaString( jmsTimestamp ) );
  rowMeta.addValueMeta( new ValueMetaString( jmsRedelivered ) );
  return rowMeta;
}
 
Example #22
Source File: TransLogTable.java    From pentaho-kettle with Apache License 2.0
public static TransLogTable getDefault( VariableSpace space, HasDatabasesInterface databasesInterface,
  List<StepMeta> steps ) {
  TransLogTable table = new TransLogTable( space, databasesInterface, steps );

  table.fields.add( new LogTableField( ID.ID_BATCH.id, true, false, "ID_BATCH", BaseMessages.getString( PKG, "TransLogTable.FieldName.BatchID" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.BatchID" ), ValueMetaInterface.TYPE_INTEGER, 8 ) );
  table.fields.add( new LogTableField( ID.CHANNEL_ID.id, true, false, "CHANNEL_ID", BaseMessages.getString( PKG, "TransLogTable.FieldName.ChannelID" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.ChannelID" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.TRANSNAME.id, true, false, "TRANSNAME", BaseMessages.getString( PKG, "TransLogTable.FieldName.TransName" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.TransName" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.STATUS.id, true, false, "STATUS", BaseMessages.getString( PKG, "TransLogTable.FieldName.Status" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.Status" ), ValueMetaInterface.TYPE_STRING, 15 ) );
  table.fields.add( new LogTableField( ID.LINES_READ.id, true, true, "LINES_READ", BaseMessages.getString( PKG, "TransLogTable.FieldName.LinesRead" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LinesRead" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_WRITTEN.id, true, true, "LINES_WRITTEN", BaseMessages.getString( PKG, "TransLogTable.FieldName.LinesWritten" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LinesWritten" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_UPDATED.id, true, true, "LINES_UPDATED", BaseMessages.getString( PKG, "TransLogTable.FieldName.LinesUpdated" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LinesUpdated" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_INPUT.id, true, true, "LINES_INPUT", BaseMessages.getString( PKG, "TransLogTable.FieldName.LinesInput" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LinesInput" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_OUTPUT.id, true, true, "LINES_OUTPUT", BaseMessages.getString( PKG, "TransLogTable.FieldName.LinesOutput" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LinesOutput" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.LINES_REJECTED.id, true, true, "LINES_REJECTED", BaseMessages.getString( PKG, "TransLogTable.FieldName.LinesRejected" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LinesRejected" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.ERRORS.id, true, false, "ERRORS", BaseMessages.getString( PKG, "TransLogTable.FieldName.Errors" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.Errors" ), ValueMetaInterface.TYPE_INTEGER, 18 ) );
  table.fields.add( new LogTableField( ID.STARTDATE.id, true, false, "STARTDATE", BaseMessages.getString( PKG, "TransLogTable.FieldName.StartDateRange" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.StartDateRange" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.ENDDATE.id, true, false, "ENDDATE", BaseMessages.getString( PKG, "TransLogTable.FieldName.EndDateRange" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.EndDateRange" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.LOGDATE.id, true, false, "LOGDATE", BaseMessages.getString( PKG, "TransLogTable.FieldName.LogDate" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LogDate" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.DEPDATE.id, true, false, "DEPDATE", BaseMessages.getString( PKG, "TransLogTable.FieldName.DepDate" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.DepDate" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.REPLAYDATE.id, true, false, "REPLAYDATE", BaseMessages.getString( PKG, "TransLogTable.FieldName.ReplayDate" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.ReplayDate" ), ValueMetaInterface.TYPE_DATE, -1 ) );
  table.fields.add( new LogTableField( ID.LOG_FIELD.id, true, false, "LOG_FIELD", BaseMessages.getString( PKG, "TransLogTable.FieldName.LogField" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.LogField" ), ValueMetaInterface.TYPE_STRING, DatabaseMeta.CLOB_LENGTH ) );
  table.fields.add( new LogTableField( ID.EXECUTING_SERVER.id, false, false, "EXECUTING_SERVER", BaseMessages.getString( PKG, "TransLogTable.FieldName.ExecutingServer" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.ExecutingServer" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.EXECUTING_USER.id, false, false, "EXECUTING_USER", BaseMessages.getString( PKG, "TransLogTable.FieldName.ExecutingUser" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.ExecutingUser" ), ValueMetaInterface.TYPE_STRING, 255 ) );
  table.fields.add( new LogTableField( ID.CLIENT.id, false, false, "CLIENT", BaseMessages.getString( PKG, "TransLogTable.FieldName.Client" ), BaseMessages.getString( PKG, "TransLogTable.FieldDescription.Client" ), ValueMetaInterface.TYPE_STRING, 255 ) );

  table.findField( ID.ID_BATCH ).setKey( true );
  table.findField( ID.LOGDATE ).setLogDateField( true );
  table.findField( ID.LOG_FIELD ).setLogField( true );
  table.findField( ID.CHANNEL_ID ).setVisible( false );
  table.findField( ID.TRANSNAME ).setVisible( false );
  table.findField( ID.STATUS ).setStatusField( true );
  table.findField( ID.ERRORS ).setErrorsField( true );
  table.findField( ID.TRANSNAME ).setNameField( true );

  return table;
}
 
Example #23
Source File: AbstractFileValidator.java    From pentaho-kettle with Apache License 2.0
protected VariableSpace getVariableSpace( CheckResultSourceInterface source, String propertyName,
  List<CheckResultInterface> remarks, ValidatorContext context ) {
  Object obj = context.get( KEY_VARIABLE_SPACE );
  if ( obj instanceof VariableSpace ) {
    return (VariableSpace) obj;
  } else {
    JobEntryValidatorUtils.addGeneralRemark(
      source, propertyName, getName(), remarks, "messages.failed.missingKey",
      CheckResultInterface.TYPE_RESULT_ERROR );
    return null;
  }
}
 
Example #24
Source File: JobEntrySFTPPUT.java    From pentaho-kettle with Apache License 2.0
@Override
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  JobEntryValidatorUtils.andValidator().validate( this, "serverName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate(
    this, "localDirectory", remarks, AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator(),
        JobEntryValidatorUtils.fileExistsValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "userName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "password", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "serverPort", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.integerValidator() ) );
}
 
Example #25
Source File: IfNullTest.java    From pentaho-kettle with Apache License 2.0
private IfNullMeta mockProcessRowMeta() throws KettleStepException {
  IfNullMeta processRowMeta = smh.processRowsStepMetaInterface;
  doReturn( createFields( "null-field", "empty-field", "space-field" ) ).when( processRowMeta ).getFields();
  doReturn( "replace-value" ).when( processRowMeta ).getReplaceAllByValue();
  doCallRealMethod().when( processRowMeta ).getFields( any( RowMetaInterface.class ), anyString(), any(
      RowMetaInterface[].class ), any( StepMeta.class ), any( VariableSpace.class ), any( Repository.class ), any(
          IMetaStore.class ) );
  return processRowMeta;
}
 
Example #26
Source File: LoadFileInputTest.java    From pentaho-kettle with Apache License 2.0
@Before
public void setup() throws FileSystemException {
  fs = VFS.getManager();
  filesPath = '/' + this.getClass().getPackage().getName().replace( '.', '/' ) + "/files/";

  transName = "LoadFileInput";
  transMeta = new TransMeta();
  transMeta.setName( transName );
  trans = new Trans( transMeta );

  stepMetaInterface = spy( new LoadFileInputMeta() );
  stepInputFiles = new FileInputList();
  Mockito.doReturn( stepInputFiles ).when( stepMetaInterface ).getFiles( any( VariableSpace.class ) );
  String stepId = PluginRegistry.getInstance().getPluginId( StepPluginType.class, stepMetaInterface );
  stepMeta = new StepMeta( stepId, "Load File Input", stepMetaInterface );
  transMeta.addStep( stepMeta );

  stepDataInterface = new LoadFileInputData();

  stepCopyNr = 0;

  stepLoadFileInput = new LoadFileInput( stepMeta, stepDataInterface, stepCopyNr, transMeta, trans );

  assertSame( stepMetaInterface, stepMeta.getStepMetaInterface() );

  runtimeSMI = stepMetaInterface;
  runtimeSDI = runtimeSMI.getStepData();

  inputField = new LoadFileInputField();
  ((LoadFileInputMeta) runtimeSMI).setInputFields( new LoadFileInputField[] { inputField } );
  stepLoadFileInput.init( runtimeSMI, runtimeSDI );
}
 
Example #27
Source File: KettleFileSystemConfigBuilderFactory.java    From pentaho-kettle with Apache License 2.0
/**
 * This factory returns a FileSystemConfigBuilder. Custom FileSystemConfigBuilders can be created by implementing
 * {@link IKettleFileSystemConfigBuilder} or extending {@link KettleGenericFileSystemConfigBuilder}.
 *
 * @see org.apache.commons.vfs.FileSystemConfigBuilder
 *
 * @param varSpace
 *          A Kettle variable space for resolving VFS config parameters
 * @param scheme
 *          The VFS scheme (FILE, HTTP, SFTP, etc...)
 * @return A FileSystemConfigBuilder that can translate Kettle variables into VFS config parameters
 * @throws IOException if a custom config builder class cannot be loaded or instantiated
 */
public static IKettleFileSystemConfigBuilder getConfigBuilder( VariableSpace varSpace, String scheme ) throws IOException {
  IKettleFileSystemConfigBuilder result = null;

  // Attempt to load the Config Builder from a variable: vfs.config.parser = class
  String parserClass = varSpace.getVariable( "vfs." + scheme + ".config.parser" );

  if ( parserClass != null ) {
    try {
      Class<?> configBuilderClass =
        KettleFileSystemConfigBuilderFactory.class.getClassLoader().loadClass( parserClass );
      Method mGetInstance = configBuilderClass.getMethod( "getInstance" );
      if ( ( mGetInstance != null )
        && ( IKettleFileSystemConfigBuilder.class.isAssignableFrom( mGetInstance.getReturnType() ) ) ) {
        result = (IKettleFileSystemConfigBuilder) mGetInstance.invoke( null );
      } else {
        result = (IKettleFileSystemConfigBuilder) configBuilderClass.newInstance();
      }
    } catch ( Exception e ) {
      // Failed to load custom parser. Throw exception.
      throw new IOException( BaseMessages.getString( PKG, "CustomVfsSettingsParser.Log.FailedToLoad" ) );
    }
  } else {
    // No custom parser requested, load default
    if ( scheme.equalsIgnoreCase( "sftp" ) ) {
      result = KettleSftpFileSystemConfigBuilder.getInstance();
    } else {
      result = KettleGenericFileSystemConfigBuilder.getInstance();
    }
  }

  return result;
}
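The factory can thus be pointed at a custom builder by setting the vfs.<scheme>.config.parser variable it reads above. A sketch, where the parser class name is hypothetical and must implement IKettleFileSystemConfigBuilder:

VariableSpace space = new Variables();
space.setVariable( "vfs.sftp.config.parser", "com.example.MySftpConfigBuilder" ); // hypothetical class
IKettleFileSystemConfigBuilder builder = KettleFileSystemConfigBuilderFactory.getConfigBuilder( space, "sftp" );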
 
Example #28
Source File: MongoDbOutputData.java    From pentaho-mongodb-plugin with Apache License 2.0
/**
 * Initialize field paths
 *
 * @param vars variables to use
 * @throws KettleException if a problem occurs
 */
public void init( VariableSpace vars ) throws KettleException {
  if ( m_userFields != null ) {
    for ( MongoDbOutputMeta.MongoField f : m_userFields ) {
      f.init( vars );
    }
  }
}
 
Example #29
Source File: BeamConsumeMeta.java    From kettle-beam with Apache License 2.0
@Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore )
  throws KettleStepException {

  ValueMetaInterface keyValueMeta = new ValueMetaString( space.environmentSubstitute( keyField ) );
  keyValueMeta.setOrigin( name );
  inputRowMeta.addValueMeta( keyValueMeta );

  ValueMetaInterface messageValueMeta = new ValueMetaString( space.environmentSubstitute( messageField ) );
  messageValueMeta.setOrigin( name );
  inputRowMeta.addValueMeta( messageValueMeta );
}
 
Example #30
Source File: PDI_6976_Test.java    From pentaho-kettle with Apache License 2.0
@Test
public void testVerifyNoPreviousStep() {
  LoadFileInputMeta spy = spy( new LoadFileInputMeta() );

  FileInputList fileInputList = mock( FileInputList.class );
  List<FileObject> files = when( mock( List.class ).size() ).thenReturn( 1 ).getMock();
  doReturn( files ).when( fileInputList ).getFiles();
  doReturn( fileInputList ).when( spy ).getFiles( any( VariableSpace.class ) );

  @SuppressWarnings( "unchecked" )
  List<CheckResultInterface> validationResults = mock( List.class );

  // Check we do not get validation errors
  doAnswer( new Answer<Object>() {
    @Override
    public Object answer( InvocationOnMock invocation ) throws Throwable {
      if ( ( (CheckResultInterface) invocation.getArguments()[0] ).getType() != CheckResultInterface.TYPE_RESULT_OK ) {
        TestCase.fail( "We've got validation error" );
      }

      return null;
    }
  } ).when( validationResults ).add( any( CheckResultInterface.class ) );

  spy.check( validationResults, mock( TransMeta.class ), mock( StepMeta.class ), mock( RowMetaInterface.class ),
    new String[] {}, new String[] { "File content", "File size" }, mock( RowMetaInterface.class ),
    mock( VariableSpace.class ), mock( Repository.class ), mock( IMetaStore.class ) );
}