org.pentaho.di.repository.Repository Java Examples

The following examples show how to use org.pentaho.di.repository.Repository. You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You can also check out the related API usage on the sidebar.
Example #1
Source File: StreamToTransNodeConverter.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Converts an input stream containing transformation XML into repository file data.
 * Loads the XML, materialises it into a TransMeta against the connected repository,
 * saves the transformation's shared objects, and wraps the result in a
 * NodeRepositoryFileData.
 *
 * Returns null when the stream cannot be read or parsed (the error is logged);
 * throws ConverterException when the transformation references missing plugins.
 */
public IRepositoryFileData convert( final InputStream inputStream, final String charset, final String mimeType ) {
  try {
    // NOTE(review): available() is only an estimate of the bytes readable without
    // blocking, not a guaranteed total size — TODO confirm this is acceptable for
    // the size recorded in NodeRepositoryFileData.
    long size = inputStream.available();
    TransMeta transMeta = new TransMeta();
    Repository repository = connectToRepository();
    Document doc = PDIImportUtil.loadXMLFrom( inputStream );
    transMeta.loadXML( doc.getDocumentElement(), repository, false );

    // Refuse to import a transformation whose steps reference unavailable plugins.
    if ( transMeta.hasMissingPlugins() ) {
      KettleMissingPluginsException
        missingPluginsException =
        new KettleMissingPluginsException( getErrorMessage( transMeta.getMissingTrans() ) );
      throw new ConverterException( missingPluginsException );
    }

    TransDelegate delegate = new TransDelegate( repository, this.unifiedRepository );
    saveSharedObjects( repository, transMeta );
    return new NodeRepositoryFileData( delegate.elementToDataNode( transMeta ), size );
  } catch ( IOException | KettleException e ) {
    // Best-effort contract: parse/IO failures are logged and reported as null.
    logger.error( e );
    return null;
  }
}
 
Example #2
Source File: PropertyOutputMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  // Restores this step's configuration from the repository row identified by id_step.
  try {
    // Key/value mapping and free-form comment.
    keyfield = rep.getStepAttributeString( id_step, "keyfield" );
    valuefield = rep.getStepAttributeString( id_step, "valuefield" );
    comment = rep.getStepAttributeString( id_step, "comment" );

    // Output file naming ("file_extention" is the historical, misspelled key —
    // it must stay as stored in existing repositories).
    fileName = rep.getStepAttributeString( id_step, "file_name" );
    extension = rep.getStepAttributeString( id_step, "file_extention" );
    stepNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_stepnr" );
    partNrInFilename = rep.getStepAttributeBoolean( id_step, "file_add_partnr" );
    dateInFilename = rep.getStepAttributeBoolean( id_step, "file_add_date" );
    timeInFilename = rep.getStepAttributeBoolean( id_step, "file_add_time" );

    // Behaviour flags.
    createparentfolder = rep.getStepAttributeBoolean( id_step, "create_parent_folder" );
    append = rep.getStepAttributeBoolean( id_step, "append" );
    addToResult = rep.getStepAttributeBoolean( id_step, "addtoresult" );

    // Optional dynamic file name taken from an incoming field.
    fileNameInField = rep.getStepAttributeBoolean( id_step, "fileNameInField" );
    fileNameField = rep.getStepAttributeString( id_step, "fileNameField" );
  } catch ( Exception ex ) {
    throw new KettleException( "Unexpected error reading step information from the repository", ex );
  }
}
 
Example #3
Source File: PaloDimInputMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
public void readRep( Repository rep, IMetaStore metaStore, ObjectId idStep, List<DatabaseMeta> databases )
  throws KettleException {
  // Reads the database connection, dimension options and the list of levels
  // for this Palo dimension input step from the repository.
  try {
    this.databaseMeta = rep.loadDatabaseMetaFromStepAttribute( idStep, "connection", databases );
    this.baseElementsOnly = rep.getStepAttributeBoolean( idStep, "baseElementsOnly" );
    this.dimension = rep.getStepAttributeString( idStep, "dimension" );

    // One PaloDimensionLevel per stored "levelname" entry.
    final int levelCount = rep.countNrStepAttributes( idStep, "levelname" );
    for ( int idx = 0; idx < levelCount; idx++ ) {
      this.levels.add( new PaloDimensionLevel(
        rep.getStepAttributeString( idStep, idx, "levelname" ),
        (int) rep.getStepAttributeInteger( idStep, idx, "levelnumber" ),
        rep.getStepAttributeString( idStep, idx, "fieldname" ),
        rep.getStepAttributeString( idStep, idx, "fieldtype" ) ) );
    }
  } catch ( Exception ex ) {
    throw new KettleException( "Unexpected error reading step information from the repository", ex );
  }
}
 
Example #4
Source File: AggregateRowsMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  // Loads the aggregate field definitions: source name, renamed name and
  // aggregation type (stored as a type code string) per field.
  try {
    final int fieldCount = rep.countNrStepAttributes( id_step, "field_name" );
    allocate( fieldCount );

    for ( int idx = 0; idx < fieldCount; idx++ ) {
      fieldName[idx] = rep.getStepAttributeString( id_step, idx, "field_name" );
      fieldNewName[idx] = rep.getStepAttributeString( id_step, idx, "field_rename" );
      aggregateType[idx] = getType( rep.getStepAttributeString( id_step, idx, "field_type" ) );
    }
  } catch ( Exception ex ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "AggregateRowsMeta.Exception.UnexpectedErrorWhileReadingStepInfo" ), ex );
  }
}
 
Example #5
Source File: ExecuteTransServlet.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
private Repository openRepository( String repositoryName, String user, String pass ) throws KettleException {
  // A blank repository name means "no repository": the caller runs file-based.
  if ( Utils.isEmpty( repositoryName ) ) {
    return null;
  }

  // Look the named repository up in the repositories metadata.
  RepositoriesMeta repositoriesMeta = new RepositoriesMeta();
  repositoriesMeta.readData();
  RepositoryMeta repositoryMeta = repositoriesMeta.findRepository( repositoryName );
  if ( repositoryMeta == null ) {
    throw new KettleException( "Unable to find repository: " + repositoryName );
  }

  // Instantiate the matching repository plugin, initialise it and log in.
  Repository repository =
    PluginRegistry.getInstance().loadClass( RepositoryPluginType.class, repositoryMeta, Repository.class );
  repository.init( repositoryMeta );
  repository.connect( user, pass );
  return repository;
}
 
Example #6
Source File: JobEntryTrans.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Override
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
                   Repository repository, IMetaStore metaStore ) {
  // Appends validation results for this entry's settings to 'remarks'.

  // When logging to a file is enabled, a log file name must be provided.
  if ( setLogfile ) {
    JobEntryValidatorUtils.andValidator().validate( this, "logfile", remarks,
        AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  }

  if ( Utils.isEmpty( filename ) ) {
    // No file name given: the transformation must be referenced by
    // repository name + directory instead.
    JobEntryValidatorUtils.andValidator().validate( this, "transname", remarks,
        AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
    JobEntryValidatorUtils.andValidator().validate( this, "directory", remarks,
        AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );
  } else {
    // A file name is present: it only needs to be non-blank.
    JobEntryValidatorUtils.andValidator().validate( this, "filename", remarks,
        AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  }
}
 
Example #7
Source File: JobEntryCheckDbConnections.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Restores this job entry from its XML node: common attributes first, then one
 * &lt;connection&gt; sub-node per database to check (name, waitfor, waittime).
 *
 * @throws KettleXMLException when the node cannot be parsed
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    Node fields = XMLHandler.getSubNode( entrynode, "connections" );

    // How many hosts?
    int nrFields = XMLHandler.countNodes( fields, "connection" );
    connections = new DatabaseMeta[nrFields];
    waitfors = new String[nrFields];
    waittimes = new int[nrFields];
    // Read them all...
    for ( int i = 0; i < nrFields; i++ ) {
      Node fnode = XMLHandler.getSubNodeByNr( fields, "connection", i );
      String dbname = XMLHandler.getTagValue( fnode, "name" );
      // May be null when the named connection is not in 'databases'.
      connections[i] = DatabaseMeta.findDatabase( databases, dbname );
      waitfors[i] = XMLHandler.getTagValue( fnode, "waitfor" );
      waittimes[i] = getWaitByCode( Const.NVL( XMLHandler.getTagValue( fnode, "waittime" ), "" ) );
    }
  } catch ( KettleXMLException xe ) {
    // FIX: pass 'xe' along as the cause — previously only xe.getMessage() was
    // kept, discarding the original stack trace.
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "JobEntryCheckDbConnections.ERROR_0001_Cannot_Load_Job_Entry_From_Xml_Node", xe.getMessage() ), xe );
  }
}
 
Example #8
Source File: XsdValidatorMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Restores the XSD validator step settings from the repository.
 *
 * @throws KettleException when an attribute cannot be read
 */
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    // "xdsfilename" is the historical, misspelled key — keep it as stored.
    xsdFilename = rep.getStepAttributeString( id_step, "xdsfilename" );
    xmlStream = rep.getStepAttributeString( id_step, "xmlstream" );
    resultFieldname = rep.getStepAttributeString( id_step, "resultfieldname" );

    xmlSourceFile = rep.getStepAttributeBoolean( id_step, "xmlsourcefile" );
    addValidationMessage = rep.getStepAttributeBoolean( id_step, "addvalidationmsg" );
    validationMessageField = rep.getStepAttributeString( id_step, "validationmsgfield" );
    ifXmlValid = rep.getStepAttributeString( id_step, "ifxmlvalid" );
    ifXmlInvalid = rep.getStepAttributeString( id_step, "ifxmlunvalid" );

    outputStringField = rep.getStepAttributeBoolean( id_step, "outputstringfield" );
    xsdDefinedField = rep.getStepAttributeString( id_step, "xsddefinedfield" );
    xsdSource = rep.getStepAttributeString( id_step, "xsdsource" );

    // FIX: this is a step attribute, so it must be read with getStepAttributeString.
    // The previous getJobEntryAttributeString call queried the job-entry attribute
    // table and could never find a value saved for a step.
    // NOTE(review): if saveRep() persists this flag as "Y"/"N" rather than
    // "true"/"false", getStepAttributeBoolean would be the correct read —
    // TODO confirm against this class's saveRep().
    allowExternalEntities =
      Boolean.parseBoolean( rep.getStepAttributeString( id_step, "allowExternalEntities" ) );

  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG,
        "XsdValidatorMeta.Exception.UnexpectedErrorInReadingStepInfo" ), e );
  }
}
 
Example #9
Source File: NumberRangeMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  // Persists the step settings plus one (lower_bound, upper_bound, value)
  // triple per configured number-range rule.
  try {
    rep.saveStepAttribute( id_transformation, id_step, "inputField", inputField );
    rep.saveStepAttribute( id_transformation, id_step, "outputField", outputField );
    rep.saveStepAttribute( id_transformation, id_step, "fallBackValue", getFallBackValue() );

    int ruleNr = 0;
    for ( NumberRangeRule rule : rules ) {
      // Bounds and value are stored as strings.
      rep.saveStepAttribute( id_transformation, id_step, ruleNr, "lower_bound",
        String.valueOf( rule.getLowerBound() ) );
      rep.saveStepAttribute( id_transformation, id_step, ruleNr, "upper_bound",
        String.valueOf( rule.getUpperBound() ) );
      rep.saveStepAttribute( id_transformation, id_step, ruleNr, "value",
        String.valueOf( rule.getValue() ) );
      ruleNr++;
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to save step information to the repository, id_step=" + id_step, dbe );
  }
}
 
Example #10
Source File: JobExecutionConfiguration.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Looks up a repository by name, connects to it with the given credentials and
 * remembers it via setRepository().
 *
 * @return the connected repository, or null when the name is unknown or the
 *         connection attempt fails
 */
public Repository connectRepository( RepositoriesMeta repositoriesMeta, String repositoryName, String username, String password ) throws KettleException {
  RepositoryMeta repositoryMeta = repositoriesMeta.findRepository( repositoryName );
  if ( repositoryMeta == null ) {
    log.logBasic( "I couldn't find the repository with name '" + repositoryName + "'" );
    return null;
  }

  Repository rep = PluginRegistry.getInstance().loadClass( RepositoryPluginType.class, repositoryMeta,
      Repository.class );
  rep.init( repositoryMeta );

  try {
    rep.connect( username, password );
    log.logBasic( "Connected to " + repositoryName + " as " + username );
    setRepository( rep );
    return rep;
  } catch ( Exception e ) {
    // FIX: keep the best-effort contract (return null) but stop swallowing the
    // failure reason — log the exception so connection problems are diagnosable.
    log.logError( "Unable to connect to the repository with name '" + repositoryName + "'", e );
    return null;
  }
}
 
Example #11
Source File: DataGridMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Override
public void getFields( RowMetaInterface rowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Appends one output value meta per non-empty field definition of the grid.
  for ( int i = 0; i < fieldName.length; i++ ) {
    if ( Utils.isEmpty( fieldName[i] ) ) {
      continue; // unnamed columns are skipped
    }
    try {
      // Unknown type names fall back to String.
      int type = ValueMetaFactory.getIdForValueMeta( fieldType[i] );
      if ( type == ValueMetaInterface.TYPE_NONE ) {
        type = ValueMetaInterface.TYPE_STRING;
      }

      ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta( fieldName[i], type );
      valueMeta.setLength( fieldLength[i] );
      valueMeta.setPrecision( fieldPrecision[i] );
      valueMeta.setOrigin( name );
      valueMeta.setConversionMask( fieldFormat[i] );
      valueMeta.setCurrencySymbol( currency[i] );
      valueMeta.setGroupingSymbol( group[i] );
      valueMeta.setDecimalSymbol( decimal[i] );

      rowMeta.addValueMeta( valueMeta );
    } catch ( Exception e ) {
      throw new KettleStepException( "Unable to create value of type " + fieldType[i], e );
    }
  }
}
 
Example #12
Source File: MetaInjectMetaTest.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Test
public void exportResources() throws KettleException {
  // Verifies that MetaInjectMeta.exportResources() delegates to the loaded
  // TransMeta's exportResources() and records the exported file name.
  VariableSpace variableSpace = mock( VariableSpace.class );
  ResourceNamingInterface resourceNamingInterface = mock( ResourceNamingInterface.class );
  Repository repository = mock( Repository.class );
  IMetaStore metaStore = mock( IMetaStore.class );

  // Stub the transformation load and its export so no real repository is needed.
  MetaInjectMeta injectMetaSpy = spy( metaInjectMeta );
  TransMeta transMeta = mock( TransMeta.class );
  Map<String, ResourceDefinition> definitions = Collections.<String, ResourceDefinition>emptyMap();
  doReturn( TEST_FILE_NAME ).when( transMeta ).exportResources( transMeta, definitions, resourceNamingInterface,
      repository, metaStore );
  doReturn( transMeta ).when( injectMetaSpy ).loadTransformationMeta( repository, variableSpace );

  String actualExportedFileName =
      injectMetaSpy.exportResources( variableSpace, definitions, resourceNamingInterface, repository, metaStore );
  // The delegate's return value is passed through, and the meta's own file name
  // is updated to the exported name.
  assertEquals( TEST_FILE_NAME, actualExportedFileName );
  assertEquals( EXPORTED_FILE_NAME, injectMetaSpy.getFileName() );
  verify( transMeta ).exportResources( transMeta, definitions, resourceNamingInterface, repository, metaStore );
}
 
Example #13
Source File: ExecProcessMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  // Restores the "Execute a process" step configuration from the repository.
  try {
    processfield = rep.getStepAttributeString( id_step, "processfield" );
    resultfieldname = rep.getStepAttributeString( id_step, "resultfieldname" );
    errorfieldname = rep.getStepAttributeString( id_step, "errorfieldname" );
    exitvaluefieldname = rep.getStepAttributeString( id_step, "exitvaluefieldname" );
    failwhennotsuccess = rep.getStepAttributeBoolean( id_step, "failwhennotsuccess" );
    argumentsInFields = rep.getStepAttributeBoolean( id_step, "argumentsInFields" );

    // Older repositories may not have this attribute: fall back to "".
    outputLineDelimiter = rep.getStepAttributeString( id_step, "outputlinedelimiter" );
    if ( outputLineDelimiter == null ) {
      outputLineDelimiter = ""; // default to empty string for backward compatibility
    }

    // Indexed list of incoming field names used as process arguments.
    final int argCount = rep.countNrStepAttributes( id_step, "argumentFieldName" );
    argumentFieldNames = new String[argCount];
    for ( int idx = 0; idx < argCount; idx++ ) {
      argumentFieldNames[idx] = rep.getStepAttributeString( id_step, idx, "argumentFieldName" );
    }
  } catch ( Exception ex ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "ExecProcessMeta.Exception.UnexpectedErrorReadingStepInfo" ), ex );
  }
}
 
Example #14
Source File: AccessInputMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Rewrites this step's file references so the transformation can be exported.
 *
 * @param space
 *          the variable space to use
 * @param definitions
 * @param resourceNamingInterface
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 */
public String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions,
  ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ) throws KettleException {
  try {
    // This meta is a copy of the original, so each (possibly relative) file
    // name can be replaced in place with the named resource.
    for ( int idx = 0; idx < fileName.length; idx++ ) {
      String resolvedName = space.environmentSubstitute( fileName[idx] );
      FileObject fileObject = KettleVFS.getFileObject( resolvedName, space );
      // The boolean mirrors whether a file mask is absent for this entry —
      // presumably selecting file-level vs. folder-level naming; TODO confirm
      // against ResourceNamingInterface.nameResource().
      fileName[idx] = resourceNamingInterface.nameResource( fileObject, space, Utils.isEmpty( fileMask[idx] ) );
    }
    return null;
  } catch ( Exception e ) {
    throw new KettleException( e );
  }
}
 
Example #15
Source File: JobEntryGetPOP.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
public void check( List<CheckResultInterface> remarks, JobMeta jobMeta, VariableSpace space,
  Repository repository, IMetaStore metaStore ) {
  // Validates this entry's mail-retrieval settings, appending results to 'remarks'.
  // Server and user name must be non-blank; the password only has to be non-null.
  JobEntryValidatorUtils.andValidator().validate( this, "serverName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "userName", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notBlankValidator() ) );
  JobEntryValidatorUtils.andValidator().validate( this, "password", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.notNullValidator() ) );

  // The output directory must be non-blank AND exist; the file-exists check
  // needs the variable space to resolve variables in the path.
  ValidatorContext ctx = new ValidatorContext();
  AbstractFileValidator.putVariableSpace( ctx, getVariables() );
  AndValidator.putValidators( ctx, JobEntryValidatorUtils.notBlankValidator(),
      JobEntryValidatorUtils.fileExistsValidator() );
  JobEntryValidatorUtils.andValidator().validate( this, "outputDirectory", remarks, ctx );

  // The SSL port must parse as an integer.
  JobEntryValidatorUtils.andValidator().validate( this, "SSLPort", remarks,
      AndValidator.putValidators( JobEntryValidatorUtils.integerValidator() ) );
}
 
Example #16
Source File: JobEntrySpecial.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  // Restores the special (start/dummy) entry plus its scheduling settings from XML.
  try {
    super.loadXML( entrynode, databases, slaveServers );

    // Flags are stored as "Y"/"N".
    start = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "start" ) );
    dummy = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "dummy" ) );
    repeat = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "repeat" ) );

    // Scheduling values: each numeric tag falls back to its default when absent.
    setSchedulerType( Const.toInt( XMLHandler.getTagValue( entrynode, "schedulerType" ), NOSCHEDULING ) );
    setIntervalSeconds( Const.toInt( XMLHandler.getTagValue( entrynode, "intervalSeconds" ), 0 ) );
    setIntervalMinutes( Const.toInt( XMLHandler.getTagValue( entrynode, "intervalMinutes" ), 0 ) );
    setHour( Const.toInt( XMLHandler.getTagValue( entrynode, "hour" ), 0 ) );
    setMinutes( Const.toInt( XMLHandler.getTagValue( entrynode, "minutes" ), 0 ) );
    setWeekDay( Const.toInt( XMLHandler.getTagValue( entrynode, "weekDay" ), 0 ) );
    setDayOfMonth( Const.toInt( XMLHandler.getTagValue( entrynode, "dayOfMonth" ), 0 ) );
  } catch ( KettleException ke ) {
    throw new KettleXMLException( "Unable to load job entry of type 'special' from XML node", ke );
  }
}
 
Example #17
Source File: JobEntryMysqlBulkLoad.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  // Restores the MySQL bulk-load entry from XML: target table, input file
  // format, load options and the referenced database connection.
  try {
    super.loadXML( entrynode, databases, slaveServers );

    // Target location.
    schemaname = XMLHandler.getTagValue( entrynode, "schemaname" );
    tablename = XMLHandler.getTagValue( entrynode, "tablename" );
    filename = XMLHandler.getTagValue( entrynode, "filename" );

    // File format.
    separator = XMLHandler.getTagValue( entrynode, "separator" );
    enclosed = XMLHandler.getTagValue( entrynode, "enclosed" );
    escaped = XMLHandler.getTagValue( entrynode, "escaped" );
    linestarted = XMLHandler.getTagValue( entrynode, "linestarted" );
    lineterminated = XMLHandler.getTagValue( entrynode, "lineterminated" );

    // Load options ("Y"/"N" flags, numbers default to -1 when absent).
    replacedata = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "replacedata" ) );
    ignorelines = XMLHandler.getTagValue( entrynode, "ignorelines" );
    listattribut = XMLHandler.getTagValue( entrynode, "listattribut" );
    localinfile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "localinfile" ) );
    prorityvalue = Const.toInt( XMLHandler.getTagValue( entrynode, "prorityvalue" ), -1 );
    addfiletoresult = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "addfiletoresult" ) );

    // Resolve the connection by name; null when unknown.
    String dbname = XMLHandler.getTagValue( entrynode, "connection" );
    connection = DatabaseMeta.findDatabase( databases, dbname );
  } catch ( KettleException ke ) {
    throw new KettleXMLException( "Unable to load job entry of type 'Mysql bulk load' from XML node", ke );
  }
}
 
Example #18
Source File: RepositoryDirectoryUITest.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
@Test
public void testLoadRepositoryObjectsFilled() throws Exception {
  // A small directory tree: root (empty object list) with one child directory
  // holding a single transformation.
  RepositoryDirectory root = new RepositoryDirectory();
  root.setObjectId( new LongObjectId( 0L ) );
  RepositoryDirectory dir = new RepositoryDirectory();
  dir.setObjectId( new LongObjectId( 1L ) );
  root.addSubdirectory( dir );
  RepositoryElementMetaInterface meta = mock( RepositoryElementMetaInterface.class );
  when( meta.getObjectType() ).thenReturn( RepositoryObjectType.TRANSFORMATION );

  // Pre-populate the object lists so loadRepositoryObjects should not need
  // to query the repository at all.
  root.setRepositoryObjects( Collections.emptyList() );
  dir.setRepositoryObjects( Collections.singletonList( meta ) );
  Repository repo = mock( Repository.class );

  assertTrue( RepositoryDirectoryUI.loadRepositoryObjects( root, true, true, repo ).isEmpty() );
  assertEquals( 1, RepositoryDirectoryUI.loadRepositoryObjects( dir, true, true, repo ).size() );

  // Confirms the pre-set lists were used: the repository was never touched.
  verifyZeroInteractions( repo );
}
 
Example #19
Source File: UnivariateStatsMetaFunction.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Constructs this function's configuration from data stored in the repository.
 *
 * @param rep
 *          the repository to read from
 * @param id_step
 *          the id of the step
 * @param nr
 *          the index of this function within the step
 * @exception KettleException
 *              if an error occurs while reading from the repository
 */
public UnivariateStatsMetaFunction( Repository rep, ObjectId id_step, int nr ) throws KettleException {
  m_sourceFieldName = rep.getStepAttributeString( id_step, nr, "source_field_name" );

  // Flags selecting which statistics to compute for the source field.
  m_n = rep.getStepAttributeBoolean( id_step, nr, "N" );
  m_mean = rep.getStepAttributeBoolean( id_step, nr, "mean" );
  m_stdDev = rep.getStepAttributeBoolean( id_step, nr, "stdDev" );
  m_min = rep.getStepAttributeBoolean( id_step, nr, "min" );
  m_max = rep.getStepAttributeBoolean( id_step, nr, "max" );
  m_median = rep.getStepAttributeBoolean( id_step, nr, "median" );

  // The percentile is stored as text; anything unparsable (including a missing
  // attribute, which yields null) disables it via the -1 sentinel.
  String percentileText = rep.getStepAttributeString( id_step, nr, "percentile" );
  try {
    m_arbitraryPercentile = Double.parseDouble( percentileText );
  } catch ( Exception parseFailure ) {
    m_arbitraryPercentile = -1;
  }

  m_interpolatePercentile = rep.getStepAttributeBoolean( id_step, nr, "interpolate" );
}
 
Example #20
Source File: FormulaMetaFunction.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step, int nr ) throws KettleException {
  // Persists this formula definition (index nr within the step) to the repository.
  rep.saveStepAttribute( id_transformation, id_step, nr, "field_name", fieldName );
  rep.saveStepAttribute( id_transformation, id_step, nr, "formula_string", formula );
  // The value type is stored by its name rather than its numeric id.
  String valueTypeName = ValueMetaFactory.getValueMetaName( valueType );
  rep.saveStepAttribute( id_transformation, id_step, nr, "value_type", valueTypeName );
  rep.saveStepAttribute( id_transformation, id_step, nr, "value_length", valueLength );
  rep.saveStepAttribute( id_transformation, id_step, nr, "value_precision", valuePrecision );
  rep.saveStepAttribute( id_transformation, id_step, nr, "replace_field", replaceField );
}
 
Example #21
Source File: ConcatFieldsMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
                       VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Deliberately does NOT call super (TextFileOutputMeta) here, because the base
  // class modifies the source meta data — see getFieldsModifyInput() instead.

  // Optionally drop the concatenated source fields from the outgoing row.
  if ( removeSelectedFields ) {
    if ( getOutputFields().length == 0 ) {
      // No explicit selection means "concatenate everything": drop all fields.
      row.clear();
    } else {
      for ( int i = 0; i < getOutputFields().length; i++ ) {
        TextFileField field = getOutputFields()[ i ];
        try {
          row.removeValueMeta( field.getName() );
        } catch ( KettleValueException e ) {
          // just ignore exceptions since missing fields are handled in the ConcatFields class
        }
      }
    }
  }

  // The target field is mandatory.
  if ( Utils.isEmpty( targetFieldName ) ) {
    throw new KettleStepException( BaseMessages.getString(
      PKG, "ConcatFieldsMeta.CheckResult.TargetFieldNameMissing" ) );
  }

  // Append the concatenation target as a new String field.
  ValueMetaInterface targetMeta = new ValueMetaString( targetFieldName );
  targetMeta.setLength( targetFieldLength, 0 );
  targetMeta.setOrigin( name );
  if ( !Utils.isEmpty( getEncoding() ) ) {
    targetMeta.setStringEncoding( getEncoding() );
  }
  row.addValueMeta( targetMeta );
}
 
Example #22
Source File: SaveProgressDialog.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a new dialog that will handle the wait while an engine meta
 * (e.g. a transformation) is being saved to the repository.
 *
 * @param shell parent SWT shell for the progress dialog
 * @param rep repository being saved to
 * @param meta the meta to save
 * @param versionComment comment to attach to the saved version
 */
public SaveProgressDialog( Shell shell, Repository rep, EngineMetaInterface meta, String versionComment ) {
  this.versionComment = versionComment;
  this.meta = meta;
  this.rep = rep;
  this.shell = shell;
}
 
Example #23
Source File: JobEntryFolderIsEmpty.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Reads this "folder is empty" job entry's settings from the repository.
 *
 * @throws KettleException when an attribute cannot be read
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    foldername = rep.getJobEntryAttributeString( id_jobentry, "foldername" );
    includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
    specifywildcard = rep.getJobEntryAttributeBoolean( id_jobentry, "specify_wildcard" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
  } catch ( KettleException dbe ) {
    // FIX: the previous message said "create Folder" — a copy/paste from another
    // job entry; this entry checks whether a folder is empty.
    throw new KettleException(
      "Unable to load job entry of type 'folder is empty' from the repository for id_jobentry=" + id_jobentry,
      dbe );
  }
}
 
Example #24
Source File: CiviInputMeta.java    From civicrm-data-integration with GNU General Public License v3.0 5 votes vote down vote up
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException {
    // Persists the CiviCRM input step: scalar settings first, then the indexed
    // lists of filter keys, filter expressions, previous fields and operators.
    try {
        super.saveRep(rep, id_transformation, id_step);

        rep.saveStepAttribute(id_transformation, id_step, "civiCrmOnMultipleRows", civiCrmOnMultipleRows);
        rep.saveStepAttribute(id_transformation, id_step, "civiCrmEntityOptionField", civiCrmEntityOptionField);
        rep.saveStepAttribute(id_transformation, id_step, "civiCrmPassRowOnFail", civiCrmPassRowOnFail);
        rep.saveStepAttribute(id_transformation, id_step, "civiCrmPageSize", civiCrmPageSize);
        rep.saveStepAttribute(id_transformation, id_step, "hasPreviousStep", hasPreviousStep);

        int idx = 0;
        for (String filterKey : civiCrmFilterList) {
            rep.saveStepAttribute(id_transformation, id_step, idx++, "filterKey", filterKey);
        }

        // Each filter is persisted as a single "field=value" string.
        idx = 0;
        for (String filterField : civiCrmFilterMap.keySet()) {
            rep.saveStepAttribute(id_transformation, id_step, idx++, "filter",
                    filterField + "=" + civiCrmFilterMap.get(filterField));
        }

        idx = 0;
        for (String previousField : civiCrmPrevFields) {
            rep.saveStepAttribute(id_transformation, id_step, idx++, "previousField", previousField);
        }

        idx = 0;
        for (String filterOperator : civiCrmFilterOperator) {
            rep.saveStepAttribute(id_transformation, id_step, idx++, "filterOperator", filterOperator);
        }
    } catch (Exception e) {
        throw new KettleException(BaseMessages.getString(PKG, "CiviCrmStep.Exception.UnableToSaveStepInfoToRepository") + id_step, e);
    }
}
 
Example #25
Source File: UIObjectRegistry.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Reflectively builds a UIRepositoryDirectory for the registered directory class.
 *
 * @throws UIObjectCreationException when the class has no matching constructor
 *         or instantiation fails
 */
public UIRepositoryDirectory constructUIRepositoryDirectory( RepositoryDirectoryInterface rd,
  UIRepositoryDirectory uiParent, Repository rep ) throws UIObjectCreationException {
  try {
    // FIX: removed the dead 'constructor != null' branch — Class.getConstructor
    // never returns null; it throws NoSuchMethodException, which lands in the
    // catch below like any other reflection failure.
    Constructor<?> constructor =
      dirClass.getConstructor(
        RepositoryDirectoryInterface.class, UIRepositoryDirectory.class, Repository.class );
    return (UIRepositoryDirectory) constructor.newInstance( rd, uiParent, rep );
  } catch ( Exception e ) {
    // NOTE(review): 'e' is dropped here; if UIObjectCreationException offers a
    // (String, Throwable) constructor, pass it as the cause — TODO confirm.
    throw new UIObjectCreationException( "Unable to instantiate object for " + dirClass );
  }
}
 
Example #26
Source File: HTTPPOSTMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  // Persists the HTTP POST step: connection settings, the indexed body/header
  // arguments and query parameters, then the result field names.
  try {
    // Request settings.
    rep.saveStepAttribute( id_transformation, id_step, "postafile", postafile );
    rep.saveStepAttribute( id_transformation, id_step, "encoding", encoding );
    rep.saveStepAttribute( id_transformation, id_step, "url", url );
    rep.saveStepAttribute( id_transformation, id_step, "urlInField", urlInField );
    rep.saveStepAttribute( id_transformation, id_step, "urlField", urlField );
    rep.saveStepAttribute( id_transformation, id_step, "requestEntity", requestEntity );

    // Credentials: the password is encrypted before it is stored.
    rep.saveStepAttribute( id_transformation, id_step, "httpLogin", httpLogin );
    rep.saveStepAttribute( id_transformation, id_step, "httpPassword", Encr
      .encryptPasswordIfNotUsingVariables( httpPassword ) );

    // Proxy and timeouts.
    rep.saveStepAttribute( id_transformation, id_step, "proxyHost", proxyHost );
    rep.saveStepAttribute( id_transformation, id_step, "proxyPort", proxyPort );
    rep.saveStepAttribute( id_transformation, id_step, "socketTimeout", socketTimeout );
    rep.saveStepAttribute( id_transformation, id_step, "connectionTimeout", connectionTimeout );
    rep.saveStepAttribute( id_transformation, id_step, "closeIdleConnectionsTime", closeIdleConnectionsTime );

    // Indexed body/header arguments.
    for ( int idx = 0; idx < argumentField.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "arg_name", argumentField[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "arg_parameter", argumentParameter[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "arg_header", argumentHeader[idx] );
    }
    // Indexed query parameters.
    for ( int idx = 0; idx < queryField.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "query_name", queryField[idx] );
      rep.saveStepAttribute( id_transformation, id_step, idx, "query_parameter", queryParameter[idx] );
    }

    // Output field names.
    rep.saveStepAttribute( id_transformation, id_step, "result_name", fieldName );
    rep.saveStepAttribute( id_transformation, id_step, "result_code", resultCodeFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "response_time", responseTimeFieldName );
    rep.saveStepAttribute( id_transformation, id_step, "response_header", responseHeaderFieldName );
  } catch ( Exception ex ) {
    throw new KettleException( BaseMessages.getString( PKG, "HTTPPOSTMeta.Exception.UnableToSaveStepInfo" )
      + id_step, ex );
  }
}
 
Example #27
Source File: CubeOutputMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  // Restore this step's configuration from the repository entry identified by id_step.
  try {
    // The two behavioural flags for file handling, then the output location.
    addToResultFilenames = rep.getStepAttributeBoolean( id_step, "add_to_result_filenames" );
    doNotOpenNewFileInit = rep.getStepAttributeBoolean( id_step, "do_not_open_newfile_init" );
    filename = rep.getStepAttributeString( id_step, "file_name" );
  } catch ( Exception e ) {
    // Wrap any repository/conversion failure in a KettleException with a localized message.
    throw new KettleException( BaseMessages.getString(
      PKG, "CubeOutputMeta.Exception.UnexpectedErrorInReadingStepInfo" ), e );
  }
}
 
Example #28
Source File: JobMetaTest.java — from the pentaho-kettle project (Apache License 2.0); 5 votes
@Test
public void testLookupRepositoryReferences() throws Exception {
  jobMeta.clear();

  // Entry whose repository lookup succeeds.
  JobEntryTrans resolvableEntry = mock( JobEntryTrans.class );
  when( resolvableEntry.hasRepositoryReferences() ).thenReturn( true );

  // Entry whose repository lookup fails with an IdNotFoundException.
  JobEntryTrans failingEntry = mock( JobEntryTrans.class );
  when( failingEntry.hasRepositoryReferences() ).thenReturn( true );
  doThrow( mock( IdNotFoundException.class ) ).when( failingEntry ).lookupRepositoryReferences( any(
      Repository.class ) );

  // Wire up three copies in order: good, broken, good.
  JobEntryCopy firstCopy = mock( JobEntryCopy.class );
  when( firstCopy.getEntry() ).thenReturn( resolvableEntry );
  jobMeta.addJobEntry( 0, firstCopy );

  JobEntryCopy secondCopy = mock( JobEntryCopy.class );
  when( secondCopy.getEntry() ).thenReturn( failingEntry );
  jobMeta.addJobEntry( 1, secondCopy );

  JobEntryCopy thirdCopy = mock( JobEntryCopy.class );
  when( thirdCopy.getEntry() ).thenReturn( resolvableEntry );
  jobMeta.addJobEntry( 2, thirdCopy );

  // The broken entry must surface as a LookupReferencesException...
  try {
    jobMeta.lookupRepositoryReferences( mock( Repository.class ) );
    fail( "no exception for broken entry" );
  } catch ( LookupReferencesException e ) {
    // expected
  }
  // ...while both healthy entries are still visited despite the failure in between.
  verify( resolvableEntry, times( 2 ) ).lookupRepositoryReferences( any( Repository.class ) );
}
 
Example #29
Source File: DataGridMeta.java — from the pentaho-kettle project (Apache License 2.0); 5 votes
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId idStep, List<DatabaseMeta> databases ) throws KettleException {

  // Restore the grid definition (column metadata plus data rows) from the repository.
  try {
    // One repository attribute row per declared field/column.
    int fieldCount = rep.countNrStepAttributes( idStep, FIELD_NAME );
    allocate( fieldCount );

    for ( int f = 0; f < fieldCount; f++ ) {
      // Column metadata: name, type, and formatting/conversion options.
      fieldName[f] = rep.getStepAttributeString( idStep, f, FIELD_NAME );
      fieldType[f] = rep.getStepAttributeString( idStep, f, "field_type" );
      fieldFormat[f] = rep.getStepAttributeString( idStep, f, "field_format" );
      currency[f] = rep.getStepAttributeString( idStep, f, "field_currency" );
      decimal[f] = rep.getStepAttributeString( idStep, f, "field_decimal" );
      group[f] = rep.getStepAttributeString( idStep, f, "field_group" );
      fieldLength[f] = (int) rep.getStepAttributeInteger( idStep, f, "field_length" );
      fieldPrecision[f] = (int) rep.getStepAttributeInteger( idStep, f, "field_precision" );
      setEmptyString[f] = rep.getStepAttributeBoolean( idStep, f, "set_empty_string", false );
      fieldNullIf[f] = rep.getStepAttributeString( idStep, f, FIELD_NULL_IF );
    }

    // Data rows: each row stores one "item_<column>" value per field.
    int lineCount = (int) rep.getStepAttributeInteger( idStep, "nr_lines" );
    dataLines = new ArrayList<>();
    for ( int row = 0; row < lineCount; row++ ) {
      List<String> line = new ArrayList<>();
      for ( int col = 0; col < fieldCount; col++ ) {
        line.add( rep.getStepAttributeString( idStep, row, "item_" + col ) );
      }
      dataLines.add( line );
    }
  } catch ( Exception e ) {
    throw new KettleException( "Unexpected error reading step information from the repository", e );
  }
}
 
Example #30
Source File: JobEntrySFTPPUT.java — from the pentaho-kettle project (Apache License 2.0); 5 votes
/**
 * Persists this SFTP-PUT job entry's configuration to the repository under id_job.
 * Passwords are encrypted via Encr before storage. Attribute key names (including the
 * legacy typo "addFilenameResut") are kept as-is for compatibility with existing repositories.
 *
 * @param rep       repository to write to
 * @param metaStore metastore (unused here, part of the interface contract)
 * @param id_job    id of the job that owns this entry
 * @throws KettleException if the repository write fails
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  try {
    // Connection settings.
    rep.saveJobEntryAttribute( id_job, getObjectId(), "servername", serverName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "serverport", serverPort );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "username", userName );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "password", Encr
      .encryptPasswordIfNotUsingVariables( password ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "sftpdirectory", sftpDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "localdirectory", localDirectory );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "wildcard", wildcard );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "copyprevious", copyprevious );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "copypreviousfiles", copypreviousfiles );
    // NOTE: key intentionally keeps the historical misspelling for repository compatibility.
    rep.saveJobEntryAttribute( id_job, getObjectId(), "addFilenameResut", addFilenameResut );

    // Key-file authentication.
    rep.saveJobEntryAttribute( id_job, getObjectId(), "usekeyfilename", usekeyfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilename", keyfilename );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "keyfilepass", Encr
      .encryptPasswordIfNotUsingVariables( keyfilepass ) );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "compression", compression );
    // Proxy settings.
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyType", proxyType );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyHost", proxyHost );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPort", proxyPort );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyUsername", proxyUsername );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "proxyPassword", Encr
      .encryptPasswordIfNotUsingVariables( proxyPassword ) );
    // Post-transfer behaviour, stored as its symbolic code.
    rep.saveJobEntryAttribute( id_job, getObjectId(), "aftersftpput", getAfterSFTPPutCode( getAfterFTPS() ) );

    // Remote/destination folder handling.
    rep.saveJobEntryAttribute( id_job, getObjectId(), "createRemoteFolder", createRemoteFolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "destinationfolder", destinationfolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "createdestinationfolder", createDestinationFolder );
    rep.saveJobEntryAttribute( id_job, getObjectId(), "successWhenNoFile", successWhenNoFile );

  } catch ( KettleDatabaseException dbe ) {
    // BUGFIX: message previously said "Unable to load ... to the repository" — this is the save path.
    throw new KettleException( "Unable to save job entry of type 'SFTPPUT' to the repository for id_job="
      + id_job, dbe );
  }
}