org.pentaho.metastore.api.IMetaStore Java Examples

The following examples show how to use org.pentaho.metastore.api.IMetaStore. Each example is taken from an open source project; the source file, project, and license are noted above each snippet.
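
Before looking at the Kettle/PDI snippets below, here is a minimal stand-alone sketch of the IMetaStore API itself: create a namespace, register an element type, store an element, and read it back. It uses the in-memory store org.pentaho.metastore.stores.memory.MemoryMetaStore from the pentaho-metastore project. The class name MetaStoreQuickStart, the "example" namespace, and the element names are made up for illustration; the IMetaStore calls follow the interface as published in pentaho-metastore, but signatures may vary slightly between versions.

import java.util.List;

import org.pentaho.metastore.api.IMetaStore;
import org.pentaho.metastore.api.IMetaStoreElement;
import org.pentaho.metastore.api.IMetaStoreElementType;
import org.pentaho.metastore.api.exceptions.MetaStoreException;
import org.pentaho.metastore.stores.memory.MemoryMetaStore;

public class MetaStoreQuickStart {
  public static void main( String[] args ) throws MetaStoreException {
    // In-memory metastore; production code would typically use an XML- or repository-backed store.
    IMetaStore metaStore = new MemoryMetaStore();

    // Namespaces group element types (Kettle itself uses PentahoDefaults.NAMESPACE, i.e. "pentaho").
    if ( !metaStore.namespaceExists( "example" ) ) {
      metaStore.createNamespace( "example" );
    }

    // An element type acts as a schema for the elements stored under it.
    IMetaStoreElementType elementType = metaStore.newElementType( "example" );
    elementType.setName( "Shared Setting" );
    elementType.setDescription( "A simple named setting" );
    metaStore.createElementType( "example", elementType );

    // Create one element with a single child attribute and persist it.
    IMetaStoreElement element = metaStore.newElement();
    element.setName( "timeout" );
    element.addChild( metaStore.newAttribute( "seconds", "30" ) );
    metaStore.createElement( "example", elementType, element );

    // Read the elements back.
    List<IMetaStoreElement> elements = metaStore.getElements( "example", elementType );
    for ( IMetaStoreElement e : elements ) {
      System.out.println( e.getName() + " -> " + e.getChild( "seconds" ).getValue() );
    }
  }
}

The same calls work against any IMetaStore implementation, which is why the Kettle methods below simply accept the interface as a parameter.
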
Example #1
Source File: FieldsChangeSequenceMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    start = rep.getStepAttributeString( id_step, "start" );
    increment = rep.getStepAttributeString( id_step, "increment" );
    resultfieldName = rep.getStepAttributeString( id_step, "resultfieldName" );
    int nrfields = rep.countNrStepAttributes( id_step, "field_name" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldName[i] = rep.getStepAttributeString( id_step, i, "field_name" );
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example #2
Source File: StepsMetricsMeta.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    for ( int i = 0; i < stepName.length; i++ ) {
      rep.saveStepAttribute( id_transformation, id_step, i, "step_name", stepName[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "step_CopyNr", stepCopyNr[i] );
      rep.saveStepAttribute( id_transformation, id_step, i, "step_required", stepRequired[i] );
    }
    rep.saveStepAttribute( id_transformation, id_step, "stepnamefield", stepnamefield );
    rep.saveStepAttribute( id_transformation, id_step, "stepidfield", stepidfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesinputfield", steplinesinputfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesoutputfield", steplinesoutputfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesreadfield", steplinesreadfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplineswrittentfield", steplineswrittentfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplinesupdatedfield", steplinesupdatedfield );
    rep.saveStepAttribute( id_transformation, id_step, "steplineserrorsfield", steplineserrorsfield );
    rep.saveStepAttribute( id_transformation, id_step, "stepsecondsfield", stepsecondsfield );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, e );
  }
}
 
Example #3
Source File: JobEntryCheckFilesLocked.java    From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, ARG_FROM_PREVIOUS_ATTR ) );
    includeSubfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, INCLUDE_SUBFOLDERS_ATTR ) );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );

      arguments[i] = XMLHandler.getTagValue( fnode, NAME_ATTR );
      filemasks[i] = XMLHandler.getTagValue( fnode, FILE_MASK_ATTR );
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.UnableToLoadFromXml" ), xe );
  }
}
 
Example #4
Source File: JoinRowsMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "directory", directory );
    rep.saveStepAttribute( id_transformation, id_step, "prefix", prefix );
    rep.saveStepAttribute( id_transformation, id_step, "cache_size", cacheSize );

    if ( mainStepname == null ) {
      mainStepname = getLookupStepname();
    }
    rep.saveStepAttribute( id_transformation, id_step, "main", mainStepname );

    rep.saveConditionStepAttribute( id_transformation, id_step, "id_condition", condition );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "JoinRowsMeta.Exception.UnableToSaveStepInfoToRepository" )
      + id_step, e );
  }
}
 
Example #5
Source File: LoadSaveTester.java    From pentaho-kettle with Apache License 2.0
/**
 * @deprecated the {@link #testSerialization()} method should be used instead,
 *             as additional tests may be added in the future to cover other
 *             topics related to step serialization
 * @throws KettleException
 */
@Deprecated
// TODO Change method visibility to protected
public void testRepoRoundTrip() throws KettleException {
  T metaToSave = createMeta();
  if ( initializer != null ) {
    initializer.modify( metaToSave );
  }
  Map<String, FieldLoadSaveValidator<?>> validatorMap =
    createValidatorMapAndInvokeSetters( repoAttributes, metaToSave );
  T metaLoaded = createMeta();
  Repository rep = new MemoryRepository();
  metaToSave.saveRep( rep, null, null, null );
  metaLoaded.readRep( rep, (IMetaStore) null, null, databases );
  validateLoadedMeta( repoAttributes, validatorMap, metaToSave, metaLoaded );
}
 
Example #6
Source File: CombinationLookupMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
                       VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  ValueMetaInterface v = new ValueMetaInteger( technicalKeyField );
  v.setLength( 10 );
  v.setPrecision( 0 );
  v.setOrigin( origin );
  row.addValueMeta( v );

  if ( replaceFields ) {
    for ( int i = 0; i < keyField.length; i++ ) {
      int idx = row.indexOfValue( keyField[ i ] );
      if ( idx >= 0 ) {
        row.removeValueMeta( idx );
      }
    }
  }
}
 
Example #7
Source File: AggregateRowsMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {

  try {
    int nrfields = rep.countNrStepAttributes( id_step, "field_name" );

    allocate( nrfields );

    for ( int i = 0; i < nrfields; i++ ) {
      fieldName[i] = rep.getStepAttributeString( id_step, i, "field_name" );
      fieldNewName[i] = rep.getStepAttributeString( id_step, i, "field_rename" );
      aggregateType[i] = getType( rep.getStepAttributeString( id_step, i, "field_type" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AggregateRowsMeta.Exception.UnexpectedErrorWhileReadingStepInfo" ), e );
  }

}
 
Example #8
Source File: MappingInputMeta.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
  throws KettleException {
  try {
    for ( int i = 0; i < fieldName.length; i++ ) {
      if ( fieldName[ i ] != null && fieldName[ i ].length() != 0 ) {
        rep.saveStepAttribute( id_transformation, id_step, i, "field_name", fieldName[ i ] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_type",
          ValueMetaFactory.getValueMetaName( fieldType[ i ] ) );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_length", fieldLength[ i ] );
        rep.saveStepAttribute( id_transformation, id_step, i, "field_precision", fieldPrecision[ i ] );
      }
    }

    rep.saveStepAttribute(
      id_transformation, id_step, "select_unspecified", selectingAndSortingUnspecifiedFields );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "MappingInputMeta.Exception.UnableToSaveStepInfo" )
      + id_step, e );
  }
}
 
Example #9
Source File: StreamLookupTest.java    From pentaho-kettle with Apache License 2.0
private StreamLookupMeta mockProcessRowMeta( boolean memoryPreservationActive ) throws KettleStepException {
  StreamLookupMeta meta = smh.processRowsStepMetaInterface;

  StepMeta lookupStepMeta = when( mock( StepMeta.class ).getName() ).thenReturn( "Lookup" ).getMock();
  doReturn( lookupStepMeta ).when( smh.transMeta ).findStep( "Lookup" );

  StepIOMeta stepIOMeta = new StepIOMeta( true, true, false, false, false, false );
  stepIOMeta.addStream( new Stream( StreamInterface.StreamType.INFO, lookupStepMeta, null, StreamIcon.INFO, null ) );

  doReturn( stepIOMeta ).when( meta ).getStepIOMeta();
  doReturn( new String[] { "Id" } ).when( meta ).getKeylookup();
  doReturn( new String[] { "Id" } ).when( meta ).getKeystream();
  doReturn( new String[] { "Value" } ).when( meta ).getValue();
  doReturn( memoryPreservationActive ).when( meta ).isMemoryPreservationActive();
  doReturn( false ).when( meta ).isUsingSortedList();
  doReturn( false ).when( meta ).isUsingIntegerPair();
  doReturn( new int[] { -1 } ).when( meta ).getValueDefaultType();
  doReturn( new String[] { "" } ).when( meta ).getValueDefault();
  doReturn( new String[] { "Value" } ).when( meta ).getValueName();
  doReturn( new String[] { "Value" } ).when( meta ).getValue();
  doCallRealMethod().when( meta ).getFields( any( RowMetaInterface.class ), anyString(), any( RowMetaInterface[].class ), any( StepMeta.class ),
    any( VariableSpace.class ), any( Repository.class ), any( IMetaStore.class ) );

  return meta;
}
 
Example #10
Source File: PaloDimInputMeta.java    From pentaho-kettle with Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId idStep, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    this.databaseMeta = rep.loadDatabaseMetaFromStepAttribute( idStep, "connection", databases );
    this.dimension = rep.getStepAttributeString( idStep, "dimension" );
    this.baseElementsOnly = rep.getStepAttributeBoolean( idStep, "baseElementsOnly" );

    int nrLevels = rep.countNrStepAttributes( idStep, "levelname" );

    for ( int i = 0; i < nrLevels; i++ ) {
      String levelName = rep.getStepAttributeString( idStep, i, "levelname" );
      int levelNumber = (int) rep.getStepAttributeInteger( idStep, i, "levelnumber" );
      String fieldName = rep.getStepAttributeString( idStep, i, "fieldname" );
      String fieldType = rep.getStepAttributeString( idStep, i, "fieldtype" );
      this.levels.add( new PaloDimensionLevel( levelName, levelNumber, fieldName, fieldType ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example #11
Source File: MonetDBBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
              .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", v != null ? "Type = " + v.toStringMeta() : "" );
      impact.add( ii );
    }
  }
}
 
Example #12
Source File: GetFileNamesMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
@Override
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // The object that we're modifying here is a copy of the original!
    // So let's change the filename from relative to absolute by grabbing the file object...
    // In case the name of the file comes from previous steps, forget about this!
    //
    if ( !filefield ) {

      // Replace the filename ONLY (folder or filename)
      //
      for ( int i = 0; i < fileName.length; i++ ) {
        FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( fileName[i] ), space );
        fileName[i] = resourceNamingInterface.nameResource( fileObject, space, Utils.isEmpty( fileMask[i] ) );
      }
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example #13
Source File: AnalyticQueryMeta.java    From pentaho-kettle with Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {

    int groupsize = rep.countNrStepAttributes( id_step, "group_name" );
    int nrvalues = rep.countNrStepAttributes( id_step, "aggregate_name" );

    allocate( groupsize, nrvalues );

    for ( int i = 0; i < groupsize; i++ ) {
      groupField[i] = rep.getStepAttributeString( id_step, i, "group_name" );
    }

    for ( int i = 0; i < nrvalues; i++ ) {
      aggregateField[i] = rep.getStepAttributeString( id_step, i, "aggregate_name" );
      subjectField[i] = rep.getStepAttributeString( id_step, i, "aggregate_subject" );
      aggregateType[i] = getType( rep.getStepAttributeString( id_step, i, "aggregate_type" ) );
      valueField[i] = (int) rep.getStepAttributeInteger( id_step, i, "aggregate_value_field" );
    }

  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AnalyticQueryMeta.Exception.UnexpectedErrorInReadingStepInfoFromRepository" ), e );
  }
}
 
Example #14
Source File: AbstractMetaTest.java    From pentaho-kettle with Apache License 2.0
@Test( expected = KettlePluginException.class )
public void testGetSetImportMetaStore() throws Exception {
  assertNull( meta.getMetaStore() );
  meta.importFromMetaStore();
  IMetaStore metastore = mock( IMetaStore.class );
  meta.setMetaStore( metastore );
  assertEquals( metastore, meta.getMetaStore() );
  meta.importFromMetaStore();
  IMetaStoreElementType elementType = mock( IMetaStoreElementType.class );
  when( metastore.getElementTypeByName(
    PentahoDefaults.NAMESPACE, PentahoDefaults.DATABASE_CONNECTION_ELEMENT_TYPE_NAME ) ).thenReturn( elementType );
  when( metastore.getElements( PentahoDefaults.NAMESPACE, elementType ) )
    .thenReturn( new ArrayList<IMetaStoreElement>() );
  meta.importFromMetaStore();
  IMetaStoreElement element = mock( IMetaStoreElement.class );
  when( metastore.getElements( PentahoDefaults.NAMESPACE, elementType ) )
    .thenReturn( Arrays.asList( element ) );
  meta.importFromMetaStore();
}
 
Example #15
Source File: JobEntryTruncateTables.java    From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );

    String dbname = XMLHandler.getTagValue( entrynode, "connection" );
    this.connection = DatabaseMeta.findDatabase( databases, dbname );
    this.argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );

    // How many field arguments?
    int nrFields = XMLHandler.countNodes( fields, "field" );
    allocate( nrFields );

    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i );
      this.arguments[i] = XMLHandler.getTagValue( fnode, "name" );
      this.schemaname[i] = XMLHandler.getTagValue( fnode, "schemaname" );
    }
  } catch ( KettleException e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableLoadXML" ), e );
  }
}
 
Example #16
Source File: JobEntryXMLWellFormed.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  try {
    arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" );
    success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" );
    resultfilenames = rep.getJobEntryAttributeString( id_jobentry, "resultfilenames" );

    // How many arguments?
    int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" );
    source_filefolder = new String[argnr];
    wildcard = new String[argnr];

    // Read them all...
    for ( int a = 0; a < argnr; a++ ) {
      source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" );
      wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" );
    }
  } catch ( KettleException dbe ) {

    throw new KettleException( BaseMessages.getString( PKG, "JobXMLWellFormed.Error.Exception.UnableLoadRep" )
        + id_jobentry, dbe );
  }
}
 
Example #17
Source File: JobEntryTruncateTables.java    From pentaho-kettle with Apache License 2.0
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveDatabaseMetaJobEntryAttribute( id_job, getObjectId(), "connection", "id_database", connection );

    rep.saveJobEntryAttribute( id_job, getObjectId(), "arg_from_previous", this.argFromPrevious );
    // save the arguments...
    if ( this.arguments != null ) {
      for ( int i = 0; i < this.arguments.length; i++ ) {
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "name", this.arguments[i] );
        rep.saveJobEntryAttribute( id_job, getObjectId(), i, "schemaname", this.schemaname[i] );
      }
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException(
      BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableSaveRep", "" + id_job ), dbe );
  }
}
 
Example #18
Source File: AddSequenceMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
  Repository repository, IMetaStore metaStore ) {
  SQLStatement retval = new SQLStatement( stepMeta.getName(), database, null ); // default: nothing to do!

  if ( useDatabase ) {
    // Otherwise, don't bother!
    if ( database != null ) {
      Database db = new Database( loggingObject, database );
      db.shareVariablesWith( transMeta );
      try {
        db.connect();
        if ( !db.checkSequenceExists( schemaName, sequenceName ) ) {
          String cr_table = db.getCreateSequenceStatement( sequenceName, startAt, incrementBy, maxValue, true );
          retval.setSQL( cr_table );
        } else {
          retval.setSQL( null ); // Nothing to do: leave the SQL unset (null)
        }
      } catch ( KettleException e ) {
        retval.setError( BaseMessages.getString( PKG, "AddSequenceMeta.ErrorMessage.UnableToConnectDB" )
          + Const.CR + e.getMessage() );
      } finally {
        db.disconnect();
      }
    } else {
      retval.setError( BaseMessages.getString( PKG, "AddSequenceMeta.ErrorMessage.NoConnectionDefined" ) );
    }
  }

  return retval;
}
 
Example #19
Source File: BeamBQOutputMeta.java    From kettle-beam with Apache License 2.0
@Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore )
  throws KettleStepException {

  // This is an endpoint in Beam, produces no further output
  //
  inputRowMeta.clear();
}
 
Example #20
Source File: JobEntryWriteToFile.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    filename = rep.getJobEntryAttributeString( id_jobentry, "filename" );
    createParentFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "createParentFolder" );
    appendFile = rep.getJobEntryAttributeBoolean( id_jobentry, "appendFile" );
    content = rep.getJobEntryAttributeString( id_jobentry, "content" );
    encoding = rep.getJobEntryAttributeString( id_jobentry, "encoding" );
  } catch ( KettleException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'create file' from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
 
Example #21
Source File: JobEntryWriteToLog.java    From pentaho-kettle with Apache License 2.0
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    rep.saveJobEntryAttribute( id_job, getObjectId(), "logmessage", logmessage );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "loglevel", ( entryLogLevel != null ? entryLogLevel
      .getCode() : "" ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "logsubject", logsubject );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "WriteToLog.Error.UnableToSaveToRepository.Label" )
      + id_job, dbe );
  }
}
 
Example #22
Source File: JobEntryFTPPUT.java    From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
                     Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    serverName = XMLHandler.getTagValue( entrynode, "servername" );
    serverPort = XMLHandler.getTagValue( entrynode, "serverport" );
    userName = XMLHandler.getTagValue( entrynode, "username" );
    password = Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "password" ) );
    remoteDirectory = XMLHandler.getTagValue( entrynode, "remoteDirectory" );
    localDirectory = XMLHandler.getTagValue( entrynode, "localDirectory" );
    wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
    binaryMode = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "binary" ) );
    timeout = Const.toInt( XMLHandler.getTagValue( entrynode, "timeout" ), 10000 );
    remove = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "remove" ) );
    onlyPuttingNewFiles = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "only_new" ) );
    activeConnection = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "active" ) );
    controlEncoding = XMLHandler.getTagValue( entrynode, "control_encoding" );

    proxyHost = XMLHandler.getTagValue( entrynode, "proxy_host" );
    proxyPort = XMLHandler.getTagValue( entrynode, "proxy_port" );
    proxyUsername = XMLHandler.getTagValue( entrynode, "proxy_username" );
    proxyPassword =
      Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "proxy_password" ) );
    socksProxyHost = XMLHandler.getTagValue( entrynode, "socksproxy_host" );
    socksProxyPort = XMLHandler.getTagValue( entrynode, "socksproxy_port" );
    socksProxyUsername = XMLHandler.getTagValue( entrynode, "socksproxy_username" );
    socksProxyPassword =
      Encr.decryptPasswordOptionallyEncrypted( XMLHandler.getTagValue( entrynode, "socksproxy_password" ) );

    if ( controlEncoding == null ) {
      // if we couldn't retrieve an encoding, assume it's an old instance and
      // put in the encoding used before v 2.4.0
      controlEncoding = LEGACY_CONTROL_ENCODING;
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobFTPPUT.Log.UnableToLoadFromXml" ), xe );
  }
}
 
Example #23
Source File: PDI_6976_Test.java    From pentaho-kettle with Apache License 2.0
@Test
public void testVerifyNoPreviousStep() {
  LoadFileInputMeta spy = spy( new LoadFileInputMeta() );

  FileInputList fileInputList = mock( FileInputList.class );
  List<FileObject> files = when( mock( List.class ).size() ).thenReturn( 1 ).getMock();
  doReturn( files ).when( fileInputList ).getFiles();
  doReturn( fileInputList ).when( spy ).getFiles( any( VariableSpace.class ) );

  @SuppressWarnings( "unchecked" )
  List<CheckResultInterface> validationResults = mock( List.class );

  // Check we do not get validation errors
  doAnswer( new Answer<Object>() {
    @Override
    public Object answer( InvocationOnMock invocation ) throws Throwable {
      if ( ( (CheckResultInterface) invocation.getArguments()[0] ).getType() != CheckResultInterface.TYPE_RESULT_OK ) {
        TestCase.fail( "We've got validation error" );
      }

      return null;
    }
  } ).when( validationResults ).add( any( CheckResultInterface.class ) );

  spy.check( validationResults, mock( TransMeta.class ), mock( StepMeta.class ), mock( RowMetaInterface.class ),
    new String[] {}, new String[] { "File content", "File size" }, mock( RowMetaInterface.class ),
    mock( VariableSpace.class ), mock( Repository.class ), mock( IMetaStore.class ) );
}
 
Example #24
Source File: SalesforceInsertMeta.java    From pentaho-kettle with Apache License 2.0
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  String realfieldname = space.environmentSubstitute( getSalesforceIDFieldName() );
  if ( !Utils.isEmpty( realfieldname ) ) {
    ValueMetaInterface v = new ValueMetaString( realfieldname );
    v.setLength( 18 );
    v.setOrigin( name );
    r.addValueMeta( v );
  }
}
 
Example #25
Source File: TextFileOutputLegacyMeta.java    From pentaho-kettle with Apache License 2.0
protected void readData( Node stepnode, IMetaStore metastore ) throws KettleXMLException {
  super.readData( stepnode, metastore );
  try {
    fileAsCommand = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepnode, "file", "is_command" ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
 
Example #26
Source File: JobEntryXSLT.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers ) throws KettleException {
  try {
    xmlfilename = rep.getJobEntryAttributeString( id_jobentry, "xmlfilename" );
    xslfilename = rep.getJobEntryAttributeString( id_jobentry, "xslfilename" );
    outputfilename = rep.getJobEntryAttributeString( id_jobentry, "outputfilename" );
    iffileexists = (int) rep.getJobEntryAttributeInteger( id_jobentry, "iffileexists" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    filenamesfromprevious = rep.getJobEntryAttributeBoolean( id_jobentry, "filenamesfromprevious" );
    xsltfactory = rep.getJobEntryAttributeString( id_jobentry, "xsltfactory" );
    if ( xsltfactory == null ) {
      xsltfactory = FACTORY_JAXP;
    }

    int nrparams = rep.countNrJobEntryAttributes( id_jobentry, "param_name" );
    int nroutputprops = rep.countNrJobEntryAttributes( id_jobentry, "output_property_name" );
    allocate( nrparams, nroutputprops );

    for ( int i = 0; i < nrparams; i++ ) {
      parameterField[i] = rep.getJobEntryAttributeString( id_jobentry, i, "param_field" );
      parameterName[i] = rep.getJobEntryAttributeString( id_jobentry, i, "param_name" );
    }
    for ( int i = 0; i < nroutputprops; i++ ) {
      outputPropertyName[i] = rep.getJobEntryAttributeString( id_jobentry, i, "output_property_name" );
      outputPropertyValue[i] = rep.getJobEntryAttributeString( id_jobentry, i, "output_property_value" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'xslt' from the repository for id_jobentry="
        + id_jobentry, dbe );
  }
}
 
Example #27
Source File: SingleThreaderMeta.java    From pentaho-kettle with Apache License 2.0
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  try {
    String method = XMLHandler.getTagValue( stepnode, "specification_method" );
    specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
    String transId = XMLHandler.getTagValue( stepnode, "trans_object_id" );
    transObjectId = Utils.isEmpty( transId ) ? null : new StringObjectId( transId );

    transName = XMLHandler.getTagValue( stepnode, "trans_name" );
    fileName = XMLHandler.getTagValue( stepnode, "filename" );
    directoryPath = XMLHandler.getTagValue( stepnode, "directory_path" );

    batchSize = XMLHandler.getTagValue( stepnode, "batch_size" );
    batchTime = XMLHandler.getTagValue( stepnode, "batch_time" );
    injectStep = XMLHandler.getTagValue( stepnode, "inject_step" );
    retrieveStep = XMLHandler.getTagValue( stepnode, "retrieve_step" );

    Node parametersNode = XMLHandler.getSubNode( stepnode, "parameters" );

    String passAll = XMLHandler.getTagValue( parametersNode, "pass_all_parameters" );
    passingAllParameters = Utils.isEmpty( passAll ) || "Y".equalsIgnoreCase( passAll );

    int nrParameters = XMLHandler.countNodes( parametersNode, "parameter" );

    allocate( nrParameters );

    for ( int i = 0; i < nrParameters; i++ ) {
      Node knode = XMLHandler.getSubNodeByNr( parametersNode, "parameter", i );

      parameters[i] = XMLHandler.getTagValue( knode, "name" );
      parameterValues[i] = XMLHandler.getTagValue( knode, "value" );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "SingleThreaderMeta.Exception.ErrorLoadingTransformationStepFromXML" ), e );
  }
}
 
Example #28
Source File: BlockingStepMeta.java    From pentaho-kettle with Apache License 2.0
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    passAllRows = rep.getStepAttributeBoolean( id_step, "pass_all_rows" );
    directory = rep.getStepAttributeString( id_step, "directory" );
    prefix = rep.getStepAttributeString( id_step, "prefix" );
    cacheSize = (int) rep.getStepAttributeInteger( id_step, "cache_size" );
    compressFiles = rep.getStepAttributeBoolean( id_step, "compress" );
    if ( cacheSize == 0 ) {
      cacheSize = CACHE_SIZE;
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example #29
Source File: JobEntryAbort.java    From pentaho-kettle with Apache License 2.0
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    messageAbort = XMLHandler.getTagValue( entrynode, "message" );
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryAbort.UnableToLoadFromXml.Label" ), e );
  }
}
 
Example #30
Source File: JobEntryPing.java    From pentaho-kettle with Apache License 2.0
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    hostname = rep.getJobEntryAttributeString( id_jobentry, "hostname" );
    nbrPackets = rep.getJobEntryAttributeString( id_jobentry, "nbr_packets" );

    // TODO: The following lines may be removed 3 versions after 2.5.0
    String nbrPaquets = rep.getJobEntryAttributeString( id_jobentry, "nbrpaquets" );
    if ( nbrPackets == null && nbrPaquets != null ) {
      // if only nbrpaquets exists this means that the file was
      // save by a version 2.5.0 ping job entry
      nbrPackets = nbrPaquets;
    }
    timeout = rep.getJobEntryAttributeString( id_jobentry, "timeout" );

    pingtype = rep.getJobEntryAttributeString( id_jobentry, "pingtype" );
    if ( Utils.isEmpty( pingtype ) ) {
      pingtype = classicPing;
      ipingtype = iclassicPing;
    } else {
      if ( pingtype.equals( systemPing ) ) {
        ipingtype = isystemPing;
      } else if ( pingtype.equals( bothPings ) ) {
        ipingtype = ibothPings;
      } else {
        ipingtype = iclassicPing;
      }
    }
  } catch ( KettleException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'ping' exists from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}