Java Code Examples for org.pentaho.metastore.api.IMetaStore

The following examples show how to use org.pentaho.metastore.api.IMetaStore. They are extracted from open source projects. In Kettle, an IMetaStore instance is passed into step and job entry load, save, check, and field-resolution methods so that they can read and write metadata kept outside the transformation or job definition itself (for example, shared connection details).
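Before the examples, here is a minimal, self-contained sketch of obtaining and using an IMetaStore directly. It assumes the in-memory MemoryMetaStore implementation from the pentaho-metastore library; the "pentaho" namespace follows the PentahoDefaults.NAMESPACE convention also visible in Example 17 below.

import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.pentaho.metastore.stores.memory.MemoryMetaStore;

public class MetaStoreSketch {
  public static void main( String[] args ) throws MetaStoreException {
    // In-memory IMetaStore implementation, convenient for tests and experiments.
    IMetaStore metaStore = new MemoryMetaStore();

    // Element types and elements are grouped by namespace; "pentaho" is the usual default.
    if ( !metaStore.namespaceExists( "pentaho" ) ) {
      metaStore.createNamespace( "pentaho" );
    }

    System.out.println( "Namespaces: " + metaStore.getNamespaces() );
  }
}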
Example 1
Source Project: pentaho-kettle   Source File: JobEntryXMLWellFormed.java    License: Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  try {
    arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
    resultfilenames = rep.getJobEntryAttributeString( id_jobentry, "resultfilenames" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    source_filefolder = new String[argnr];
    wildcard = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
    }
  } catch ( KettleException dbe ) {

    throw new KettleException( BaseMessages.getString( PKG, "JobXMLWellFormed.Error.Exception.UnableLoadRep" )
        + id_jobentry, dbe );
  }
}
 
Example 2
Source Project: pentaho-kettle   Source File: PaloDimInputMeta.java    License: Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId idStep, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    this.databaseMeta = rep.loadDatabaseMetaFromStepAttribute( idStep, "connection", databases );
    this.dimension = rep.getStepAttributeString( idStep, "dimension" );
    this.baseElementsOnly = rep.getStepAttributeBoolean( idStep, "baseElementsOnly" );

    int nrLevels = rep.countNrStepAttributes( idStep, "levelname" );

    for ( int i = 0; i < nrLevels; i++ ) {
      String levelName = rep.getStepAttributeString( idStep, i, "levelname" );
      int levelNumber = (int) rep.getStepAttributeInteger( idStep, i, "levelnumber" );
      String fieldName = rep.getStepAttributeString( idStep, i, "fieldname" );
      String fieldType = rep.getStepAttributeString( idStep, i, "fieldtype" );
      this.levels.add( new PaloDimensionLevel( levelName, levelNumber, fieldName, fieldType ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example 3
Source Project: pentaho-kettle   Source File: MappingInputMeta.java    License: Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
  throws KettleException {
  try {
    for ( int i = 0; i < fieldName.length; i++ ) {
      if ( fieldName[ i ] != null && fieldName[ i ].length() != 0 ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fieldName[ i ] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_type",
          ValueMetaFactory.getValueMetaName( fieldType[ i ] ) );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_length", fieldLength[ i ] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", fieldPrecision[ i ] );
      }
    }

    rep.saveStepAttribute(
      id_transformation, id_step, "select_unspecified", selectingAndSortingUnspecifiedFields );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "MappingInputMeta.Exception.UnableToSaveStepInfo" )
      + id_step, e );
  }
}
 
Example 4
Source Project: pentaho-kettle   Source File: AggregateRowsMeta.java    License: Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {

  try {
    int nrfields = rep.countNrStepAttributes( id_step, "field_name" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldName[i] = rep.getStepAttributeString( id_step, i, "field_name" );
      fieldNewName[i] = rep.getStepAttributeString( id_step, i, "field_rename" );
      aggregateType[i] = getType( rep.getStepAttributeString( id_step, i, "field_type" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AggregateRowsMeta.Exception.UnexpectedErrorWhileReadingStepInfo" ), e );
  }

}
 
Example 5
Source Project: pentaho-kettle   Source File: StepsMetricsMeta.java    License: Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    for ( int i = 0; i < stepName.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, "step_name", stepName[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "step_CopyNr", stepCopyNr[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "step_required", stepRequired[i] );
    }
    rep.saveStepAttribute( id_transformation, id_step, "stepnamefield", stepnamefield );
    rep.saveStepAttribute( id_transformation, id_step, "stepidfield", stepidfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesinputfield", steplinesinputfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesoutputfield", steplinesoutputfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesreadfield", steplinesreadfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplineswrittentfield", steplineswrittentfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesupdatedfield", steplinesupdatedfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplineserrorsfield", steplineserrorsfield );
    rep.saveStepAttribute( id_transformation, id_step, "stepsecondsfield", stepsecondsfield );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
  }
}
 
Example 6
Source Project: pentaho-kettle   Source File: StreamLookupTest.java    License: Apache License 2.0
private StreamLookupMeta mockProcessRowMeta( boolean memoryPreservationActive ) throws KettleStepException {
  StreamLookupMeta meta = smh.processRowsStepMetaInterface;

  StepMeta lookupStepMeta = when( mock( StepMeta.class ).getName() ).thenReturn( "Lookup" ).getMock();
  doReturn( lookupStepMeta ).when( smh.transMeta ).findStep( "Lookup" );

  StepIOMeta stepIOMeta = new StepIOMeta( true, true, false, false, false, false );
  stepIOMeta.addStream( new Stream( StreamInterface.StreamType.INFO, lookupStepMeta, null, StreamIcon.INFO, null ) );

  doReturn( stepIOMeta ).when( meta ).getStepIOMeta();
  doReturn( new String[] { "Id" } ).when( meta ).getKeylookup();
  doReturn( new String[] { "Id" } ).when( meta ).getKeystream();
  doReturn( new String[] { "Value" } ).when( meta ).getValue();
  doReturn( memoryPreservationActive ).when( meta ).isMemoryPreservationActive();
  doReturn( false ).when( meta ).isUsingSortedList();
  doReturn( false ).when( meta ).isUsingIntegerPair();
  doReturn( new int[] { -1 } ).when( meta ).getValueDefaultType();
  doReturn( new String[] { "" } ).when( meta ).getValueDefault();
  doReturn( new String[] { "Value" } ).when( meta ).getValueName();
  doReturn( new String[] { "Value" } ).when( meta ).getValue();
  doCallRealMethod().when( meta ).getFields( any( RowMetaInterface.class ), anyString(), any( RowMetaInterface[].class ), any( StepMeta.class ),
    any( VariableSpace.class ), any( Repository.class ), any( IMetaStore.class ) );

  return meta;
}
 
Example 7
Source Project: pentaho-kettle   Source File: AnalyticQueryMeta.java    License: Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {

    int groupsize = rep.countNrStepAttributes( id_step, "group_name" );
    int nrvalues = rep.countNrStepAttributes( id_step, "aggregate_name" );

    allocate( groupsize, nrvalues );

    for ( int i = 0; i < groupsize; i++ ) {
      groupField[i] = rep.getStepAttributeString( id_step, i, "group_name" );
    }

    for ( int i = 0; i < nrvalues; i++ ) {
      aggregateField[i] = rep.getStepAttributeString( id_step, i, "aggregate_name" );
      subjectField[i] = rep.getStepAttributeString( id_step, i, "aggregate_subject" );
      aggregateType[i] = getType( rep.getStepAttributeString( id_step, i, "aggregate_type" ) );
      valueField[i] = (int) rep.getStepAttributeInteger( id_step, i, "aggregate_value_field" );
    }

  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AnalyticQueryMeta.Exception.UnexpectedErrorInReadingStepInfoFromRepository" ), e );
  }
}
 
Example 8
Source Project: pentaho-kettle   Source File: FieldsChangeSequenceMeta.java    License: Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    start = rep.getStepAttributeString( id_step, "start" );
    increment = rep.getStepAttributeString( id_step, "increment" );
    resultfieldName = rep.getStepAttributeString( id_step, "resultfieldName" );
    int nrfields = rep.countNrStepAttributes( id_step, "field_name" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldName[i] = rep.getStepAttributeString( id_step, i, "field_name" );
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example 9
Source Project: pentaho-kettle   Source File: JobEntryTruncateTables.java    License: Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );

    String dbname = XMLHandler.getTagValue( entrynode, "connection" );
    this.connection = DatabaseMeta.findDatabase( databases, dbname );
    this.argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      this.arguments[i] = XMLHandler.getTagValue( fnode, "name" );
      this.schemaname[i] = XMLHandler.getTagValue( fnode, "schemaname" );
    }
  } catch ( KettleException e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableLoadXML" ), e );
  }
}
 
Example 10
Source Project: pentaho-kettle   Source File: CombinationLookupMeta.java    License: Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
                       VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  ValueMetaInterface v = new ValueMetaInteger( technicalKeyField );
  v.setLength( 10 );
  v.setPrecision( 0 );
  v.setOrigin( origin );
  row.addValueMeta( v );

  if ( replaceFields ) {
    for ( int i = 0; i < keyField.length; i++ ) {
      int idx = row.indexOfValue( keyField[ i ] );
      if ( idx >= 0 ) {
        row.removeValueMeta( idx );
      }
    }
  }
}
 
Example 11
Source Project: pentaho-kettle   Source File: GetFileNamesMeta.java    License: Apache License 2.0
/**
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // The object that we're modifying here is a copy of the original!
    // So let's change the filename from relative to absolute by grabbing the file object...
    // In case the name of the file comes from previous steps, forget about this!
    //
    if ( !filefield ) {

      // Replace the filename ONLY (folder or filename)
      //
      for ( int i = 0; i < fileName.length; i++ ) {
        FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName[i] ), space );
        fileName[i] = resourceNamingInterface.nameResource( fileObject, space, Utils.isEmpty( fileMask[i] ) );
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example 12
Source Project: pentaho-kettle   Source File: JobEntryTruncateTables.java    License: Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", this.argFromPrevious );
    // save the arguments...
    if ( this.arguments != null ) {
      for ( int i = 0; i < this.arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", this.arguments[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "schemaname", this.schemaname[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableSaveRep", "" + id_job ), dbe );
  }
}
 
Example 13
Source Project: pentaho-kettle   Source File: LoadSaveTester.java    License: Apache License 2.0
/**
 * @deprecated the {@link #testSerialization()} method should be used instead,
 *             as additional tests may be added in the future to cover other
 *             topics related to step serialization
 * @throws KettleException
 */
@Deprecated
// TODO Change method visibility to protected
public void testRepoRoundTrip() throws KettleException {
  T metaToSave = createMeta();
  if ( initializer != null ) {
    initializer.modify( metaToSave );
  }
  Map<String, FieldLoadSaveValidator<?>> validatorMap =
    createValidatorMapAndInvokeSetters( repoAttributes, metaToSave );
  T metaLoaded = createMeta();
  Repository rep = new MemoryRepository();
  metaToSave.saveRep( rep, null, null, null );
  metaLoaded.readRep( rep, (IMetaStore) null, null, databases );
  validateLoadedMeta( repoAttributes, validatorMap, metaToSave, metaLoaded );
}
 
Example 14
Source Project: pentaho-kettle   Source File: JobEntryCheckFilesLocked.java    License: Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, ARG_FROM_PREVIOUS_ATTR ) );
    includeSubfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, INCLUDE_SUBFOLDERS_ATTR ) );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );

      arguments[i] = XMLHandler.getTagValue( fnode, NAME_ATTR );
      filemasks[i] = XMLHandler.getTagValue( fnode, FILE_MASK_ATTR );
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.UnableToLoadFromXml" ), xe );
  }
}
 
Example 15
Source Project: pentaho-kettle   Source File: JoinRowsMeta.java    License: Apache License 2.0
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "directory", directory );
    rep.saveStepAttribute( id_transformation, id_step, "prefix", prefix );
    rep.saveStepAttribute( id_transformation, id_step, "cache_size", cacheSize );

    if ( mainStepname == null ) {
      mainStepname = getLookupStepname();
    }
    rep.saveStepAttribute( id_transformation, id_step, "main", mainStepname );

    rep.saveConditionStepAttribute( id_transformation, id_step, "id_condition", condition );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JoinRowsMeta.Exception.UnableToSaveStepInfoToRepository" )
      + id_step, e );
  }
}
 
Example 16
Source Project: pentaho-kettle   Source File: MonetDBBulkLoaderMeta.java    License: Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
              .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "" );
      impact.add( ii );
    }
  }
}
 
Example 17
Source Project: pentaho-kettle   Source File: AbstractMetaTest.java    License: Apache License 2.0
@Test( expected = KettlePluginException.class )
public void testGetSetImportMetaStore() throws Exception {
  assertNull( meta.getMetaStore() );
  meta.importFromMetaStore();
  IMetaStore metastore = mock( IMetaStore.class );
  meta.setMetaStore( metastore );
  assertEquals( metastore, meta.getMetaStore() );
  meta.importFromMetaStore();
  IMetaStoreElementType elementType = mock( IMetaStoreElementType.class );
  when( metastore.getElementTypeByName(
    PentahoDefaults.NAMESPACE, PentahoDefaults.DATABASE_CONNECTION_ELEMENT_TYPE_NAME ) ).thenReturn( elementType );
  when( metastore.getElements( PentahoDefaults.NAMESPACE, elementType ) )
    .thenReturn( new ArrayList<IMetaStoreElement>() );
  meta.importFromMetaStore();
  IMetaStoreElement element = mock( IMetaStoreElement.class );
  when( metastore.getElements( PentahoDefaults.NAMESPACE, elementType ) )
    .thenReturn( Arrays.asList( element ) );
  meta.importFromMetaStore();
}
 
Example 18
Source Project: pentaho-kettle   Source File: JobEntryPing.java    License: Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    hostname = rep.getJobEntryAttributeString( id_jobentry, "hostname" );
    nbrPackets = rep.getJobEntryAttributeString( id_jobentry, "nbr_packets" );

    // TODO: The following lines may be removed 3 versions after 2.5.0
    String nbrPaquets = rep.getJobEntryAttributeString( id_jobentry, "nbrpaquets" );
    if ( nbrPackets == null && nbrPaquets != null ) {
      // if only nbrpaquets exists, this means that the file was
      // saved by a version 2.5.0 ping job entry
      nbrPackets = nbrPaquets;
    }
    timeout = rep.getJobEntryAttributeString( id_jobentry, "timeout" );

    pingtype = rep.getJobEntryAttributeString( id_jobentry, "pingtype" );
    if ( Utils.isEmpty( pingtype ) ) {
      pingtype = classicPing;
      ipingtype = iclassicPing;
    } else {
      if ( pingtype.equals( systemPing ) ) {
        ipingtype = isystemPing;
      } else if ( pingtype.equals( bothPings ) ) {
        ipingtype = ibothPings;
      } else {
        ipingtype = iclassicPing;
      }
    }
  } catch ( KettleException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'ping' exists from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
 
Example 19
Source Project: pentaho-kettle   Source File: TableInputMeta.java    License: Apache License 2.0
@Override
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {

  // if ( stepMeta.getName().equalsIgnoreCase( "cdc_cust" ) ) {
  //   System.out.println( "HERE!" );
  // }

  // Find the lookupfields...
  RowMetaInterface out = new RowMeta();
  // TODO: this builds, but does it work in all cases.
  getFields( out, stepMeta.getName(), new RowMetaInterface[] { info }, null, transMeta, repository, metaStore );

  if ( out != null ) {
    for ( int i = 0; i < out.size(); i++ ) {
      ValueMetaInterface outvalue = out.getValueMeta( i );
      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), "", outvalue.getName(), outvalue.getName(), stepMeta.getName(), sql,
          "read from one or more database tables via SQL statement" );
      impact.add( ii );

    }
  }
}
 
Example 20
Source Project: pentaho-kettle   Source File: MultiMergeJoinMeta.java    License: Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    int nrKeys = rep.countNrStepAttributes( id_step, "keys" );

    allocateKeys( nrKeys );

    for ( int i = 0; i < nrKeys; i++ ) {
      keyFields[i] = rep.getStepAttributeString( id_step, i, "keys" );
    }

    long nInputStreams = rep.getStepAttributeInteger( id_step, "number_input" );

    allocateInputSteps( (int) nInputStreams );

    for ( int i = 0; i < nInputStreams; i++ ) {
      inputSteps[i] = rep.getStepAttributeString( id_step, "step" + i );
    }
    // This next bit is completely unnecessary if you just pass the step name into
    // the constructor above. That sets the subject to the step name in one pass
    // instead of a second one.
    // MB - 5/2016
    //
    // List<StreamInterface> infoStreams = getStepIOMeta().getInfoStreams();
    // for ( int i = 0; i < infoStreams.size(); i++ ) {
    //   infoStreams.get( i ).setSubject( rep.getStepAttributeString( id_step, "step" + i ) );
    // }

    joinType = rep.getStepAttributeString( id_step, "join_type" );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG,
        "MultiMergeJoinMeta.Exception.UnexpectedErrorReadingStepInfo" ), e );
  }
}
 
Example 21
Source Project: pentaho-kettle   Source File: OlapInputMeta.java    License: Apache License 2.0
@Override
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  // TODO: perform tests to see if connection is valid
  // CheckResult cr;
  // cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, "Please select or create a connection to use",
  // stepMeta);
  // remarks.add(cr);
}
 
Example 22
Source Project: pentaho-kettle   Source File: StepMetastructureMeta.java    License: Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    outputRowcount = rep.getStepAttributeBoolean( id_step, "outputRowcount" );
    rowcountField = rep.getStepAttributeString( id_step, "rowcountField" );

  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example 23
Source Project: pentaho-kettle   Source File: JobEntrySSH2PUT.java    License: Apache License 2.0
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    serverName = rep.getJobEntryAttributeString( id_jobentry, "servername" );
    userName = rep.getJobEntryAttributeString( id_jobentry, "username" );
    password =
      Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString( id_jobentry, "password" ) );
    serverPort = rep.getJobEntryAttributeString( id_jobentry, "serverport" );
    ftpDirectory = rep.getJobEntryAttributeString( id_jobentry, "ftpdirectory" );
    localDirectory = rep.getJobEntryAttributeString( id_jobentry, "localdirectory" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    onlyGettingNewFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "only_new" );

    usehttpproxy = rep.getJobEntryAttributeBoolean( id_jobentry, "usehttpproxy" );
    httpproxyhost = rep.getJobEntryAttributeString( id_jobentry, "httpproxyhost" );
    httpproxyusername = rep.getJobEntryAttributeString( id_jobentry, "httpproxyusername" );
    httpProxyPassword = rep.getJobEntryAttributeString( id_jobentry, "httpproxypassword" );

    publicpublickey = rep.getJobEntryAttributeBoolean( id_jobentry, "publicpublickey" );
    keyFilename = rep.getJobEntryAttributeString( id_jobentry, "keyfilename" );
    keyFilePass = rep.getJobEntryAttributeString( id_jobentry, "keyfilepass" );

    useBasicAuthentication = rep.getJobEntryAttributeBoolean( id_jobentry, "usebasicauthentication" );
    createRemoteFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createremotefolder" );

    afterFtpPut = rep.getJobEntryAttributeString( id_jobentry, "afterftpput" );
    destinationfolder = rep.getJobEntryAttributeString( id_jobentry, "destinationfolder" );

    createDestinationFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createdestinationfolder" );
    cachehostkey = rep.getJobEntryAttributeBoolean( id_jobentry, "cachehostkey" );
    timeout = (int) rep.getJobEntryAttributeInteger( id_jobentry, "timeout" );

  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JobSSH2PUT.Log.UnableLoadRep", "" + id_jobentry, dbe.getMessage() ) );
  }
}
 
Example 24
Source Project: pentaho-kettle   Source File: FileExistsMeta.java    License: Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    filenamefield = rep.getStepAttributeString( id_step, "filenamefield" );
    resultfieldname = rep.getStepAttributeString( id_step, "resultfieldname" );
    includefiletype = rep.getStepAttributeBoolean( id_step, "includefiletype" );
    filetypefieldname = rep.getStepAttributeString( id_step, "filetypefieldname" );
    addresultfilenames = rep.getStepAttributeBoolean( id_step, "addresultfilenames" );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "FileExistsMeta.Exception.UnexpectedErrorReadingStepInfo" ), e );
  }
}
 
Example 25
Source Project: pentaho-kettle   Source File: OpenERPObjectOutputMeta.java    License: Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
                       VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  if ( outputIDField ) {
    if ( outputIDFieldName == null || outputIDFieldName.length() == 0 ) {
      throw new KettleStepException( "Error while retrieving fields", new Exception( "ID field name is null" ) );
    }

    ValueMetaInterface v = new ValueMetaInteger( outputIDFieldName );
    v.setOrigin( name );

    row.addValueMeta( v );
  }
}
 
Example 26
Source Project: pentaho-kettle   Source File: RandomValueMeta.java    License: Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    int nrfields = rep.countNrStepAttributes( id_step, "field_name" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldName[i] = rep.getStepAttributeString( id_step, i, "field_name" );
      fieldType[i] = getType( rep.getStepAttributeString( id_step, i, "field_type" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example 27
Source Project: pentaho-kettle   Source File: JobEntrySetVariablesTest.java    License: Apache License 2.0
@Test
public void testJobEntrySetVariablesExecute_VARIABLE_TYPE_CURRENT_JOB_NullVariable() throws Exception {
  List<DatabaseMeta> databases = mock( List.class );
  List<SlaveServer> slaveServers = mock( List.class );
  Repository repository = mock( Repository.class );
  IMetaStore metaStore = mock( IMetaStore.class );
  entry.loadXML( getEntryNode( "nullVariable", null, "CURRENT_JOB" ), databases, slaveServers, repository, metaStore );
  Result result = entry.execute( new Result(), 0 );
  assertTrue( "Result should be true", result.getResult() );
  assertNull( entry.getVariable( "nullVariable" )  );
}
 
Example 28
Source Project: pentaho-kettle   Source File: ConnectionManager.java    License: Apache License 2.0
/**
 * Get the named connection from a specified meta store
 *
 * @param metaStore A meta store
 * @param key       The provider key
 * @param name      The connection name
 * @return The named connection details
 */
public ConnectionDetails getConnectionDetails( IMetaStore metaStore, String key, String name ) {
  ConnectionProvider<? extends ConnectionDetails> connectionProvider = getConnectionProvider( key );
  if ( connectionProvider != null ) {
    Class<? extends ConnectionDetails> clazz = connectionProvider.getClassType();
    return loadElement( getMetaStoreFactory( metaStore, clazz ), name );
  }
  return null;
}
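
A short usage sketch for the method above. The ConnectionManager.getInstance() accessor, the "other" provider key, and the connection name "myConnection" are assumptions for illustration; a provider must already be registered for the key, otherwise the method returns null.

import org.pentaho.di.connections.ConnectionDetails;
import org.pentaho.di.connections.ConnectionManager;
import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.stores.memory.MemoryMetaStore;

public class ConnectionLookupSketch {
  public static void main( String[] args ) {
    IMetaStore metaStore = new MemoryMetaStore();
    // getInstance() and the provider key/name below are illustrative assumptions.
    ConnectionManager manager = ConnectionManager.getInstance();
    ConnectionDetails details = manager.getConnectionDetails( metaStore, "other", "myConnection" );
    System.out.println( details == null ? "connection not found" : details.getName() );
  }
}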
 
Example 29
Source Project: kettle-beam   Source File: BeamBQOutputMeta.java    License: Apache License 2.0
@Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore )
  throws KettleStepException {

  // This is an endpoint in Beam, produces no further output
  //
  inputRowMeta.clear();
}
 
Example 30
Source Project: pentaho-kettle   Source File: JobEntryTransTest.java    License: Apache License 2.0
@Test
public void testExecute_result_false_get_transMeta_exception() throws KettleException {
  JobEntryTrans jobEntryTrans = spy( new JobEntryTrans( JOB_ENTRY_TRANS_NAME ) );
  jobEntryTrans.setSpecificationMethod( ObjectLocationSpecificationMethod.FILENAME );
  jobEntryTrans.setParentJob( mock( Job.class ) );
  JobMeta mockJobMeta = mock( JobMeta.class );
  jobEntryTrans.setParentJobMeta( mockJobMeta );
  jobEntryTrans.setLogLevel( LogLevel.NOTHING );
  doThrow( new KettleException( "Error while loading transformation" ) ).when( jobEntryTrans ).getTransMeta( any(
      Repository.class ), any( IMetaStore.class ), any( VariableSpace.class ) );
  Result result = mock( Result.class );

  jobEntryTrans.execute( result, 1 );
  verify( result ).setResult( false );
}