Java Code Examples for org.pentaho.di.core.row.RowMetaInterface#indexOfValue()

The following examples show how to use org.pentaho.di.core.row.RowMetaInterface#indexOfValue(). You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to view the original project or source file. You may also check out the related API usage in the sidebar.
Example 1
Source File: HTTP.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Executes the HTTP call for one input row, resolving the configured
 * argument fields to row indexes on the first invocation only.
 *
 * @param rowMeta metadata of the incoming row
 * @param row     the incoming row data
 * @return the row returned by the HTTP service call
 * @throws KettleException if a configured argument field is missing from the input row
 */
private Object[] execHttp( RowMetaInterface rowMeta, Object[] row ) throws KettleException {
  if ( first ) {
    first = false;
    String[] argumentFields = meta.getArgumentField();
    data.argnrs = new int[ argumentFields.length ];

    // Map every configured argument field to its position in the input row.
    int fieldNr = 0;
    while ( fieldNr < argumentFields.length ) {
      int resolved = rowMeta.indexOfValue( argumentFields[ fieldNr ] );
      data.argnrs[ fieldNr ] = resolved;
      if ( resolved < 0 ) {
        logError( BaseMessages.getString( PKG, "HTTP.Log.ErrorFindingField" ) + argumentFields[ fieldNr ] + "]" );
        throw new KettleStepException( BaseMessages.getString( PKG, "HTTP.Exception.CouldnotFindField",
          argumentFields[ fieldNr ] ) );
      }
      fieldNr++;
    }
  }

  return callHttpService( rowMeta, row );
}
 
Example 2
Source File: CPythonScriptExecutorMeta.java    From pentaho-cpython-plugin with Apache License 2.0 6 votes vote down vote up
/**
 * Given a fully defined output row metadata structure, determine which of the output fields are being copied from
 * the input fields and which must be the output of the script.
 *
 * @param fullOutputRowMeta    the fully defined output row metadata structure
 * @param scriptFields         row meta that will hold script only fields
 * @param inputPresentInOutput row meta that will hold input fields being copied
 * @param infos                the array of info row metas
 * @param stepName             the name of the step
 */
protected void determineInputFieldScriptFieldSplit( RowMetaInterface fullOutputRowMeta, RowMetaInterface scriptFields,
    RowMetaInterface inputPresentInOutput, RowMetaInterface[] infos, String stepName ) {

  scriptFields.clear();
  inputPresentInOutput.clear();

  // Merge all incoming info row metas into a single searchable structure.
  RowMetaInterface allInputs = new RowMeta();
  for ( RowMetaInterface info : infos ) {
    allInputs.addRowMeta( info );
  }

  // An output field found among the inputs is a pass-through copy;
  // anything else has to be produced by the script.
  for ( ValueMetaInterface outField : fullOutputRowMeta.getValueMetaList() ) {
    if ( allInputs.indexOfValue( outField.getName() ) >= 0 ) {
      inputPresentInOutput.addValueMeta( outField );
    } else {
      // must be a script output (either a variable name field or data frame column name
      scriptFields.addValueMeta( outField );
    }
  }
}
 
Example 3
Source File: CombinationLookupMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Adds the generated technical (surrogate) key to the row metadata and,
 * when field replacement is enabled, removes the natural key fields.
 */
@Override
public void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep,
                       VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // The technical key produced by this lookup step.
  ValueMetaInterface technicalKey = new ValueMetaInteger( technicalKeyField );
  technicalKey.setLength( 10 );
  technicalKey.setPrecision( 0 );
  technicalKey.setOrigin( origin );
  row.addValueMeta( technicalKey );

  if ( replaceFields ) {
    // Strip every natural key field that is still present in the stream.
    for ( String key : keyField ) {
      int keyIndex = row.indexOfValue( key );
      if ( keyIndex >= 0 ) {
        row.removeValueMeta( keyIndex );
      }
    }
  }
}
 
Example 4
Source File: GetPreviousRowFieldMeta.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Appends one new output field per configured stream field, typed and
 * formatted after the corresponding input field.
 */
@Override
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  // Add new field?
  for ( int fieldNr = 0; fieldNr < fieldOutStream.length; fieldNr++ ) {
    if ( Utils.isEmpty( fieldOutStream[fieldNr] ) ) {
      continue; // no output field configured for this slot
    }
    int inputIndex = inputRowMeta.indexOfValue( fieldInStream[fieldNr] );
    if ( inputIndex < 0 ) {
      continue; // input field not present: nothing to derive from
    }
    ValueMetaInterface source = inputRowMeta.getValueMeta( inputIndex );
    try {
      // The new field mirrors the source field's type and formatting.
      String outputName = space.environmentSubstitute( fieldOutStream[fieldNr] );
      ValueMetaInterface target = ValueMetaFactory.createValueMeta( outputName, source.getType() );
      target.setName( outputName );
      target.setLength( source.getLength() );
      target.setPrecision( source.getPrecision() );
      target.setConversionMask( source.getConversionMask() );
      target.setOrigin( name );
      inputRowMeta.addValueMeta( target );
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
}
 
Example 5
Source File: TextFileOutput.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Resolves every configured output field to its index in the output row metadata.
 *
 * @param outputRowMeta metadata of the rows arriving at this step
 * @param outputFields  the fields configured to be written
 * @throws KettleException if any configured field is absent from the input stream
 */
private void initFieldNumbers( RowMetaInterface outputRowMeta, TextFileField[] outputFields ) throws KettleException {
  data.fieldnrs = new int[outputFields.length];
  int fieldNr = 0;
  for ( TextFileField field : outputFields ) {
    data.fieldnrs[fieldNr] = outputRowMeta.indexOfValue( field.getName() );
    if ( data.fieldnrs[fieldNr] < 0 ) {
      throw new KettleStepException( "Field [" + field.getName()
        + "] couldn't be found in the input stream!" );
    }
    fieldNr++;
  }
}
 
Example 6
Source File: FlattenerMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Removes the configured flatten field from the row metadata and appends one
 * clone of it per target field.
 *
 * @throws KettleStepException if no flatten field is configured or it cannot
 *                             be found in the input fields
 */
@Override
public void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {

  // A flatten field is mandatory for this step.
  if ( fieldName == null || fieldName.length() == 0 ) {
    throw new KettleStepException( BaseMessages.getString( PKG, "FlattenerMeta.Exception.FlattenFieldRequired" ) );
  }

  // Remove the key value (there will be different entries for each output row)
  //
  int flattenIndex = row.indexOfValue( fieldName );
  if ( flattenIndex < 0 ) {
    throw new KettleStepException( BaseMessages.getString(
      PKG, "FlattenerMeta.Exception.UnableToLocateFieldInInputFields", fieldName ) );
  }

  // Remember the flattened field's metadata, then take it out of the stream.
  ValueMetaInterface template = row.getValueMeta( flattenIndex );
  row.removeValueMeta( flattenIndex );

  // Add one clone of the removed field per configured target field.
  for ( String target : targetField ) {
    ValueMetaInterface clone = template.clone();
    clone.setName( target );
    clone.setOrigin( name );

    row.addValueMeta( clone );
  }
}
 
Example 7
Source File: FilterRowsMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Clears the sortedDescending flag on every field referenced by the filter
 * condition so that comparisons inside the condition are not inverted.
 */
public void getFields( RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Clear the sortedDescending flag on fields used within the condition - otherwise the comparisons will be
  // inverted!!
  for ( String usedField : condition.getUsedFields() ) {
    int fieldIndex = rowMeta.indexOfValue( usedField );
    if ( fieldIndex >= 0 ) {
      rowMeta.getValueMeta( fieldIndex ).setSortedDescending( false );
    }
  }
}
 
Example 8
Source File: AnalyticQueryMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Appends one aggregate field per configured subject field to the row
 * metadata; each aggregate field is a renamed clone of its subject field.
 *
 * @throws KettleStepException if a subject field cannot be found in the input row
 */
public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // re-assemble a new row of metadata
  //
  RowMetaInterface fields = new RowMeta();

  // Add existing values
  fields.addRowMeta( r );

  // add analytic values
  for ( int i = 0; i < number_of_fields; i++ ) {

    int index_of_subject = r.indexOfValue( subjectField[i] );

    // if we found the subjectField in the RowMetaInterface, and we should....
    if ( index_of_subject > -1 ) {
      ValueMetaInterface vmi = r.getValueMeta( index_of_subject ).clone();
      vmi.setOrigin( origin );
      vmi.setName( aggregateField[i] );
      fields.addValueMeta( r.size() + i, vmi );
    } else {
      // we have a condition where the subjectField can't be found from the rowMetaInterface
      // Build a readable "[a], [b], [c]" list of the available field names for the error message.
      StringBuilder sbfieldNames = new StringBuilder();
      String[] fieldNames = r.getFieldNames();
      for ( int j = 0; j < fieldNames.length; j++ ) {
        sbfieldNames.append( '[' ).append( fieldNames[j] ).append( ']' );
        if ( j < fieldNames.length - 1 ) {
          sbfieldNames.append( ", " );
        }
      }
      throw new KettleStepException( BaseMessages.getString(
        PKG, "AnalyticQueryMeta.Exception.SubjectFieldNotFound", getParentStepMeta().getName(),
        subjectField[i], sbfieldNames.toString() ) );
    }
  }

  r.clear();
  // Add back to Row Meta
  r.addRowMeta( fields );
}
 
Example 9
Source File: ElasticSearchBulkMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Validates that the fields this step needs are present in the incoming row
 * metadata and records any problems as error remarks.
 *
 * @param remarks  collection the check results are added to
 * @param prev     metadata of the rows arriving at this step, or null
 * @param stepMeta the step being checked
 */
private void checkInputFields( List<CheckResultInterface> remarks, RowMetaInterface prev, StepMeta stepMeta ) {

  if ( prev == null || prev.size() == 0 ) { // no input
    remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
            "ElasticSearchBulkMeta.CheckResult.NoInput" ), stepMeta ) );
    return;
  }

  if ( isJsonInsert() ) { // JSON
    if ( StringUtils.isBlank( getJsonField() ) ) { // jsonField not set
      String jsonFieldLabel = BaseMessages.getString( PKG, "ElasticSearchBulkDialog.JsonField.Label" );
      String isJsonLabel = BaseMessages.getString( PKG, "ElasticSearchBulkDialog.IsJson.Label" );
      remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
              "ElasticSearchBulkMeta.CheckResult.MissingRequiredDependent", jsonFieldLabel, isJsonLabel ),
              stepMeta ) );
    } else if ( prev.indexOfValue( getJsonField() ) < 0 ) { // jsonField not in input
      remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
              "ElasticSearchBulkMeta.CheckResult.MissingInput", getJsonField() ), stepMeta ) );
    }
  } else { // not JSON: every configured field must exist in the input
    for ( Field f : fields ) {
      if ( prev.indexOfValue( f.name ) < 0 ) { // fields not found
        remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
                "ElasticSearchBulkMeta.CheckResult.MissingInput", f.name ), stepMeta ) );
      }
    }
  }
}
 
Example 10
Source File: TextFileInput.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Adds <code>String</code> value meta with given name if not present and returns index
 *
 * @param rowMeta   the row metadata to extend
 * @param fieldName the name of the String field to ensure
 * @return Index in row meta of value meta with <code>fieldName</code>
 */
private int addValueMeta( RowMetaInterface rowMeta, String fieldName ) {
  ValueMetaInterface valueMeta = new ValueMetaString( fieldName );
  valueMeta.setOrigin( getStepname() );
  // Already present? Just report where it lives.
  if ( rowMeta.exists( valueMeta ) ) {
    return rowMeta.indexOfValue( fieldName );
  }
  // Otherwise append it; its index is the size before the add.
  int index = rowMeta.size();
  rowMeta.addValueMeta( valueMeta );
  return index;
}
 
Example 11
Source File: MetaInject.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Copies the source field's value into the injection entry, but only when
 * that field actually exists in the row metadata; otherwise does nothing.
 *
 * package-local visibility for testing purposes
 */
void setEntryValueIfFieldExists( StepInjectionMetaEntry entry, RowMetaAndData row, SourceStepField source )
  throws KettleValueException {
  boolean fieldPresent = row.getRowMeta().indexOfValue( source.getField() ) >= 0;
  if ( fieldPresent ) {
    setEntryValue( entry, row, source );
  }
}
 
Example 12
Source File: FieldSplitterMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Replaces the field to split with the configured output fields: the first
 * new field takes the split field's position, the remaining ones are
 * inserted right after it.
 *
 * @throws KettleStepException if creating one of the value metas fails
 * @throws RuntimeException    if the split field is not present in the input row
 */
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Remove the field to split
  int idx = r.indexOfValue( getSplitField() );
  if ( idx < 0 ) { // not found
    throw new RuntimeException( BaseMessages.getString(
      PKG, "FieldSplitter.Log.CouldNotFindFieldToSplit", getSplitField() ) );
  }

  // Add the new fields at the place of the index --> replace!
  int count = getFieldsCount();
  for ( int i = 0; i < count; i++ ) {
    try {
      final ValueMetaInterface v = ValueMetaFactory.createValueMeta( getFieldName()[i], getFieldType()[i] );
      v.setLength( getFieldLength()[i], getFieldPrecision()[i] );
      v.setOrigin( name );
      v.setConversionMask( getFieldFormat()[i] );
      v.setDecimalSymbol( getFieldDecimal()[i] );
      v.setGroupingSymbol( getFieldGroup()[i] );
      v.setCurrencySymbol( getFieldCurrency()[i] );
      v.setTrimType( getFieldTrimType()[i] );
      // TODO when implemented in UI
      // v.setDateFormatLenient(dateFormatLenient);
      // TODO when implemented in UI
      // v.setDateFormatLocale(dateFormatLocale);
      if ( i == 0 ) {
        // the first valueMeta (splitField) will be replaced
        r.setValueMeta( idx, v );
      } else if ( idx >= r.size() ) {
        // insertion point no longer exists: append at the end.
        // (Previously the field was appended AND inserted again below — a double add.)
        r.addValueMeta( v );
      } else {
        // other valueMeta are inserted after the split field
        r.addValueMeta( idx + i, v );
      }
    } catch ( Exception e ) {
      throw new KettleStepException( e );
    }
  }
}
 
Example 13
Source File: SortedMergeMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Applies the configured ascending/descending sort direction to each sort
 * field that is present in the input row metadata.
 */
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep,
  VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
  // Set the sorted properties: ascending/descending
  for ( int i = 0; i < fieldName.length; i++ ) {
    int fieldIndex = inputRowMeta.indexOfValue( fieldName[i] );
    if ( fieldIndex < 0 ) {
      continue; // sort field not in the stream: nothing to flag
    }
    inputRowMeta.getValueMeta( fieldIndex ).setSortedDescending( !ascending[i] );

    // TODO: add case insensivity
  }

}
 
Example 14
Source File: SortRowsMeta.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Applies the configured sort and collation settings to every sort-key field
 * present in the row metadata, and normalizes lazy-converted fields to the
 * normal storage type (see PDI-346).
 *
 * @param inputRowMeta the row metadata whose key fields are adjusted in place
 */
@SuppressWarnings( "WeakerAccess" )
public void assignSortingCriteria( RowMetaInterface inputRowMeta ) {
  for ( int keyNr = 0; keyNr < fieldName.length; keyNr++ ) {
    int fieldIndex = inputRowMeta.indexOfValue( fieldName[keyNr] );
    if ( fieldIndex < 0 ) {
      continue; // key field not present in this row
    }
    ValueMetaInterface keyMeta = inputRowMeta.getValueMeta( fieldIndex );
    // The option arrays may be shorter than fieldName; only apply a setting
    // when a value actually exists for this key.
    if ( keyNr < ascending.length ) {
      keyMeta.setSortedDescending( !ascending[keyNr] );
    }
    if ( keyNr < caseSensitive.length ) {
      keyMeta.setCaseInsensitive( !caseSensitive[keyNr] );
    }
    if ( keyNr < collatorEnabled.length ) {
      keyMeta.setCollatorDisabled( !collatorEnabled[keyNr] );
    }
    if ( keyNr < collatorStrength.length ) {
      keyMeta.setCollatorStrength( collatorStrength[keyNr] );
    }
    // Also see if lazy conversion is active on these key fields.
    // If so we want to automatically convert them to the normal storage type.
    // This will improve performance, see also: PDI-346
    //
    keyMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
    keyMeta.setStorageMetadata( null );
  }
}
 
Example 15
Source File: DimensionLookup.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Pre-load the cache by reading the whole dimension table from disk...
 *
 * @throws KettleException in case there is a database or cache problem.
 */
private void preloadCache() throws KettleException {
  try {
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();

    // tk, version, from, to, natural keys, retrieval fields...
    // Built with a StringBuilder instead of repeated String += in the loops.
    //
    StringBuilder sql = new StringBuilder( "SELECT " ).append( databaseMeta.quoteField( meta.getKeyField() ) );
    // sql+=", "+databaseMeta.quoteField(meta.getVersionField());
    for ( int i = 0; i < meta.getKeyLookup().length; i++ ) {
      sql.append( ", " ).append( meta.getKeyLookup()[ i ] ); // the natural key field in the table
    }
    for ( int i = 0; i < meta.getFieldLookup().length; i++ ) {
      sql.append( ", " ).append( meta.getFieldLookup()[ i ] ); // the extra fields to retrieve...
    }
    sql.append( ", " ).append( databaseMeta.quoteField( meta.getDateFrom() ) ); // extra info in cache
    sql.append( ", " ).append( databaseMeta.quoteField( meta.getDateTo() ) ); // extra info in cache

    sql.append( " FROM " ).append( data.schemaTable );
    logDetailed( "Pre-loading cache by reading from database with: " + Const.CR + sql + Const.CR );

    List<Object[]> rows = data.db.getRows( sql.toString(), -1 );
    RowMetaInterface rowMeta = data.db.getReturnRowMeta();

    data.preloadKeyIndexes = new int[ meta.getKeyLookup().length ];
    for ( int i = 0; i < data.preloadKeyIndexes.length; i++ ) {
      data.preloadKeyIndexes[ i ] = rowMeta.indexOfValue( meta.getKeyLookup()[ i ] ); // the field in the table
    }
    data.preloadFromDateIndex = rowMeta.indexOfValue( meta.getDateFrom() );
    data.preloadToDateIndex = rowMeta.indexOfValue( meta.getDateTo() );

    data.preloadCache =
      new DimensionCache( rowMeta, data.preloadKeyIndexes, data.preloadFromDateIndex, data.preloadToDateIndex );
    data.preloadCache.setRowCache( rows );

    logDetailed( "Sorting the cache rows..." );
    data.preloadCache.sortRows();
    logDetailed( "Sorting of cached rows finished." );

    // Also see what indexes to take to populate the lookup row...
    // We only ever compare indexes and the lookup date in the cache, the rest is not needed...
    //
    data.preloadIndexes = new ArrayList<Integer>();
    for ( int i = 0; i < meta.getKeyStream().length; i++ ) {
      int index = data.inputRowMeta.indexOfValue( meta.getKeyStream()[ i ] );
      if ( index < 0 ) {
        // Just to be safe...
        // Report the key stream field that was actually looked up and missing
        // (the message previously named getFieldStream()[i] — wrong array).
        //
        throw new KettleStepException( BaseMessages.getString(
          PKG, "DimensionLookup.Exception.KeyFieldNotFound", meta.getKeyStream()[ i ] ) );
      }
      data.preloadIndexes.add( index );
    }

    // This is all for now...
  } catch ( Exception e ) {
    throw new KettleException( "Error encountered during cache pre-load", e );
  }
}
 
Example 16
Source File: SelectValuesMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
public void getMetadataFields( RowMetaInterface inputRowMeta, String name, VariableSpace space ) throws KettlePluginException {
  if ( meta != null && meta.length > 0 ) {
    // METADATA mode: change the meta-data of the values mentioned...

    for ( int i = 0; i < meta.length; i++ ) {
      SelectMetadataChange metaChange = meta[i];

      int idx = inputRowMeta.indexOfValue( metaChange.getName() );
      boolean metaTypeChangeUsesNewTypeDefaults = false; // Normal behavior as of 5.x or so
      if ( space != null ) {
        metaTypeChangeUsesNewTypeDefaults = ValueMetaBase.convertStringToBoolean(
            space.getVariable( Const.KETTLE_COMPATIBILITY_SELECT_VALUES_TYPE_CHANGE_USES_TYPE_DEFAULTS, "N" ) );
      }
      if ( idx >= 0 ) { // We found the value

        // This is the value we need to change:
        ValueMetaInterface v = inputRowMeta.getValueMeta( idx );

        // Do we need to rename ?
        if ( !v.getName().equals( metaChange.getRename() ) && !Utils.isEmpty( metaChange.getRename() ) ) {
          v.setName( metaChange.getRename() );
          v.setOrigin( name );
          // need to reinsert to check name conflicts
          inputRowMeta.setValueMeta( idx, v );
        }
        // Change the type?
        if ( metaChange.getType() != ValueMetaInterface.TYPE_NONE && v.getType() != metaChange.getType() ) {
          // Fix for PDI-16388 - clone copies over the conversion mask instead of using the default for the new type
          if ( !metaTypeChangeUsesNewTypeDefaults ) {
            v = ValueMetaFactory.cloneValueMeta( v, metaChange.getType() );
          } else {
            v = ValueMetaFactory.createValueMeta( v.getName(), metaChange.getType() );
          }

          // This is now a copy, replace it in the row!
          //
          inputRowMeta.setValueMeta( idx, v );

          // This also moves the data to normal storage type
          //
          v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
        }
        if ( metaChange.getLength() != UNDEFINED ) {
          v.setLength( metaChange.getLength() );
          v.setOrigin( name );
        }
        if ( metaChange.getPrecision() != UNDEFINED ) {
          v.setPrecision( metaChange.getPrecision() );
          v.setOrigin( name );
        }
        if ( metaChange.getStorageType() >= 0 ) {
          v.setStorageType( metaChange.getStorageType() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getConversionMask() ) ) {
          v.setConversionMask( metaChange.getConversionMask() );
          v.setOrigin( name );
        }

        v.setDateFormatLenient( metaChange.isDateFormatLenient() );
        v.setDateFormatLocale( EnvUtil.createLocale( metaChange.getDateFormatLocale() ) );
        v.setDateFormatTimeZone( EnvUtil.createTimeZone( metaChange.getDateFormatTimeZone() ) );
        v.setLenientStringToNumber( metaChange.isLenientStringToNumber() );

        if ( !Utils.isEmpty( metaChange.getEncoding() ) ) {
          v.setStringEncoding( metaChange.getEncoding() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getDecimalSymbol() ) ) {
          v.setDecimalSymbol( metaChange.getDecimalSymbol() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getGroupingSymbol() ) ) {
          v.setGroupingSymbol( metaChange.getGroupingSymbol() );
          v.setOrigin( name );
        }
        if ( !Utils.isEmpty( metaChange.getCurrencySymbol() ) ) {
          v.setCurrencySymbol( metaChange.getCurrencySymbol() );
          v.setOrigin( name );
        }
      }
    }
  }
}
 
Example 17
Source File: KettleDatabaseRepositoryConnectionDelegate.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Determines the repository schema version from the R_VERSION table and
 * verifies it meets the required minimum.
 *
 * Falls back to version 2.2 when R_VERSION does not exist but R_USER does
 * (pre-2.3.0 repositories). Throws when no repository exists at all, when an
 * upgrade is required, or when the known-broken 3.0.0 column layout is
 * detected in R_TRANS_PARTITION_SCHEMA.
 *
 * @throws KettleException if no repository exists, an upgrade is required,
 *                         or the 3.0.0 column fix is needed
 */
protected void verifyVersion() throws KettleException {
  RowMetaAndData lastUpgrade = null;
  String versionTable =
    databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_VERSION );
  try {
    // Most recent upgrade row wins (ordered by upgrade date, descending).
    lastUpgrade = callRead( () ->
      database.getOneRow( "SELECT "
        + quote( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION ) + ", "
        + quote( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION ) + ", "
        + quote( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE ) + " FROM " + versionTable
        + " ORDER BY " + quote( KettleDatabaseRepository.FIELD_VERSION_UPGRADE_DATE ) + " DESC" ) );
  } catch ( Exception e ) {
    try {
      // See if the repository exists at all. For this we verify table R_USER.
      //
      String userTable =
        databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_USER );
      callRead( () -> database.getOneRow( "SELECT * FROM " + userTable ) );

      // Still here? That means we have a repository...
      //
      // If we can't retrieve the last available upgrade date:
      // this means the R_VERSION table doesn't exist.
      // This table was introduced in version 2.3.0
      //
      if ( log.isBasic() ) {
        log.logBasic( BaseMessages.getString( PKG, "Repository.Error.GettingInfoVersionTable", versionTable ) );
        log.logBasic( BaseMessages.getString( PKG, "Repository.Error.NewTable" ) );
        log.logBasic( "Stack trace: " + Const.getStackTracker( e ) );
      }
      // Assume the newest pre-R_VERSION schema.
      majorVersion = 2;
      minorVersion = 2;

      lastUpgrade = null;
    } catch ( Exception ex ) {
      // R_USER missing too: there is no repository here.
      throw new KettleException( BaseMessages.getString( PKG, "Repository.NoRepositoryExists.Messages" ) );
    }
  }

  if ( lastUpgrade != null ) {
    majorVersion = (int) lastUpgrade.getInteger( KettleDatabaseRepository.FIELD_VERSION_MAJOR_VERSION, -1 );
    minorVersion = (int) lastUpgrade.getInteger( KettleDatabaseRepository.FIELD_VERSION_MINOR_VERSION, -1 );
  }

  // Reject any version older than the required major.minor.
  if ( majorVersion < REQUIRED_MAJOR_VERSION
    || ( majorVersion == REQUIRED_MAJOR_VERSION && minorVersion < REQUIRED_MINOR_VERSION ) ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "Repository.UpgradeRequired.Message", getVersion(), getRequiredVersion() ) );
  }

  if ( majorVersion == 3 && minorVersion == 0 ) {
    // The exception: someone upgraded the repository to version 3.0.0
    // In that version, one column got named incorrectly.
    // Another upgrade to 3.0.1 or later will fix that.
    // However, since we don't have point versions in here, we'll have to look
    // at the column in question...
    //
    String errorColumn = "TRANSFORMATION";
    RowMetaInterface tableFields =
      callRead( () -> database.getTableFieldsMeta( null, KettleDatabaseRepository.TABLE_R_TRANS_PARTITION_SCHEMA ) );
    if ( tableFields.indexOfValue( errorColumn ) >= 0 ) {
      throw new KettleException( BaseMessages.getString( PKG, "Repository.FixFor300Required.Message" ) );
    }
  }
}
 
Example 18
Source File: XBaseInput.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Processes one row of the XBase (DBF) input step.
 *
 * On the first call it optionally collects the file list from an incoming
 * row stream (when "accepting filenames" is enabled), builds the output row
 * metadata and opens the first file. On every call it reads one row from the
 * current file, rolling over to the next file when the current one is
 * exhausted, optionally appends the filename and/or a row number, and puts
 * the row on the output row set(s).
 *
 * @param smi step meta (cast to XBaseInputMeta)
 * @param sdi step data (cast to XBaseInputData)
 * @return true to be called again, false when processing is finished
 * @throws KettleException on file-resolution or read errors
 */
public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
  meta = (XBaseInputMeta) smi;
  data = (XBaseInputData) sdi;

  // See if we need to get a list of files from input...
  if ( first ) { // we just got started

    first = false;

    // The output row meta data, what does it look like?
    //
    data.outputRowMeta = new RowMeta();

    if ( meta.isAcceptingFilenames() ) {
      // Read the files from the specified input stream...
      data.files.getFiles().clear();

      // Index of the filename field in the incoming rows; resolved lazily
      // from the first row's metadata.
      int idx = -1;

      RowSet rowSet = findInputRowSet( meta.getAcceptingStepName() );
      Object[] fileRowData = getRowFrom( rowSet );
      while ( fileRowData != null ) {
        RowMetaInterface fileRowMeta = rowSet.getRowMeta();
        if ( idx < 0 ) {
          idx = fileRowMeta.indexOfValue( meta.getAcceptingField() );
          if ( idx < 0 ) {
            // Filename field missing: flag the error and stop the whole transformation.
            logError( BaseMessages.getString( PKG, "XBaseInput.Log.Error.UnableToFindFilenameField", meta
              .getAcceptingField() ) );
            setErrors( 1 );
            stopAll();
            return false;
          }
        }
        try {
          String filename = fileRowMeta.getString( fileRowData, idx );
          data.files.addFile( KettleVFS.getFileObject( filename, getTransMeta() ) );
        } catch ( Exception e ) {
          throw new KettleException( e );
        }

        // Grab another row
        //
        fileRowData = getRowFrom( rowSet );
      }

      if ( data.files.nrOfFiles() == 0 ) {
        logBasic( BaseMessages.getString( PKG, "XBaseInput.Log.Error.NoFilesSpecified" ) );
        setOutputDone();
        return false;
      }
    }

    data.outputRowMeta = meta.getOutputFields( data.files, getStepname() );

    // Open the first file & read the required rows in the buffer, stop
    // if it fails, exception will stop processLoop
    //
    openNextFile();
  }

  // Allocate the output row in advance, because we possibly want to add a few extra fields...
  //
  Object[] row = data.xbi.getRow( RowDataUtil.allocateRowData( data.outputRowMeta.size() ) );
  while ( row == null && data.fileNr < data.files.nrOfFiles() ) { // No more rows left in this file
    openNextFile();
    row = data.xbi.getRow( RowDataUtil.allocateRowData( data.outputRowMeta.size() ) );
  }

  if ( row == null ) {
    setOutputDone(); // signal end to receiver(s)
    return false; // end of data or error.
  }

  // OK, so we have read a line: increment the input counter
  incrementLinesInput();
  int outputIndex = data.fields.size();

  // Possibly add a filename...
  if ( meta.includeFilename() ) {
    row[outputIndex++] = data.file_dbf.getName().getURI();
  }

  // Possibly add a row number...
  if ( meta.isRowNrAdded() ) {
    row[outputIndex++] = new Long( getLinesInput() );
  }

  putRow( data.outputRowMeta, row ); // fill the rowset(s). (wait for empty)

  if ( checkFeedback( getLinesInput() ) ) {
    logBasic( BaseMessages.getString( PKG, "XBaseInput.Log.LineNr" ) + getLinesInput() );
  }

  if ( meta.getRowLimit() > 0 && getLinesInput() >= meta.getRowLimit() ) { // limit has been reached: stop now.
    setOutputDone();
    return false;
  }

  return true;
}
 
Example 19
Source File: InjectDataSetIntoTransExtensionPoint.java    From pentaho-pdi-dataset with Apache License 2.0 4 votes vote down vote up
/**
 * Injects the rows of the named data set into the given step of a running
 * transformation via a row producer.
 *
 * The injected row layout is driven by the unit-test field mappings: each
 * mapped data set field is cloned, renamed to the step's field name, and the
 * rows are pushed from a dedicated background thread. If the step is not
 * found among the running steps, nothing happens.
 *
 * @param trans          the running transformation
 * @param dataSetName    name of the data set to load and inject
 * @param dataSetFactory metastore factory used to load the data set
 * @param stepMeta       the step to inject rows into
 * @param inputLocation  mapping between data set fields and step fields
 * @throws MetaStoreException if the data set cannot be loaded
 * @throws KettleException    if a mapped data set field does not exist
 */
private void injectDataSetIntoStep( final Trans trans, final String dataSetName,
                                    final MetaStoreFactory<DataSet> dataSetFactory, final StepMeta stepMeta,
                                    TransUnitTestSetLocation inputLocation ) throws MetaStoreException, KettleException {

  final DataSet dataSet = dataSetFactory.loadElement( dataSetName );
  final LogChannelInterface log = trans.getLogChannel();

  final RowProducer rowProducer = trans.addRowProducer( stepMeta.getName(), 0 );

  // Look for the step into which we'll inject rows...
  //
  StepMetaDataCombi combi = null;
  for ( StepMetaDataCombi step : trans.getSteps() ) {
    if ( step.stepname.equals( stepMeta.getName() ) ) {
      combi = step;
      break;
    }
  }

  if ( combi != null ) {

    // Get the rows of the mapped values in the mapped order sorted as asked
    //
    final List<Object[]> dataSetRows = dataSet.getAllRows( log, inputLocation );
    RowMetaInterface dataSetRowMeta = dataSet.getMappedDataSetFieldsRowMeta( inputLocation );

    // The rows to inject are always driven by the dataset, NOT the step it replaces (!) for simplicity
    //
    RowMetaInterface injectRowMeta = new RowMeta();

    // Figure out which fields to pass
    // Only inject those mentioned in the field mappings...
    //
    int[] fieldIndexes = new int[ inputLocation.getFieldMappings().size() ];
    for ( int i = 0; i < inputLocation.getFieldMappings().size(); i++ ) {
      TransUnitTestFieldMapping fieldMapping = inputLocation.getFieldMappings().get( i );
      fieldIndexes[ i ] = dataSetRowMeta.indexOfValue( fieldMapping.getDataSetFieldName() );
      if ( fieldIndexes[ i ] < 0 ) {
        throw new KettleException( "Unable to find mapped field '" + fieldMapping.getDataSetFieldName() + "' in data set '" + dataSet.getName() + "'" );
      }
      ValueMetaInterface injectValueMeta = dataSetRowMeta.getValueMeta( fieldIndexes[ i ] ).clone();
      // Rename to the step output names though...
      //
      injectValueMeta.setName( fieldMapping.getStepFieldName() );
      injectRowMeta.addValueMeta( injectValueMeta );
    }

    log.logDetailed( "Injecting data set '" + dataSetName + "' into step '" + stepMeta.getName() + "', fields: " + Arrays.toString( injectRowMeta.getFieldNames() ) );

    // Pass rows
    //
    Runnable runnable = new Runnable() {
      @Override
      public void run() {
        try {

          for ( Object[] dataSetRow : dataSetRows ) {
            // pass the row with the external names, in the right order and with the selected columns from the data set
            //
            Object[] row = RowDataUtil.allocateRowData( injectRowMeta.size() );
            for ( int i = 0; i < fieldIndexes.length; i++ ) {
              row[ i ] = dataSetRow[ fieldIndexes[ i ] ];
            }
            rowProducer.putRow( injectRowMeta, row );
          }
          rowProducer.finished();

        } catch ( Exception e ) {
          throw new RuntimeException( "Problem injecting data set '" + dataSetName + "' row into step '" + stepMeta.getName() + "'", e );
        }
      }
    };
    // Feed the rows from a separate thread so this method can return while
    // the transformation keeps running.
    Thread thread = new Thread( runnable );
    thread.start();


  }
}
 
Example 20
Source File: DBProc.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Calls the configured database procedure for one input row and builds the
 * output row.
 *
 * On the first call it clones the input row metadata into the output
 * metadata, resolves each IN/INOUT argument field to its row index (OUT
 * arguments get index -1), and registers the procedure with the database.
 * After the call, the optional function result and the OUT values are
 * appended to the row, while INOUT values overwrite the original input
 * fields in place.
 *
 * @param rowMeta metadata of the incoming row
 * @param rowData the incoming row data
 * @return the output row, resized to the output metadata and filled with
 *         the procedure's results
 * @throws KettleException if an IN/INOUT argument field is missing or the
 *                         procedure call fails
 */
private Object[] runProc( RowMetaInterface rowMeta, Object[] rowData ) throws KettleException {
  if ( first ) {
    first = false;

    // get the RowMeta for the output
    //
    data.outputMeta = data.inputRowMeta.clone();
    meta.getFields( data.outputMeta, getStepname(), null, null, this, repository, metaStore );

    data.argnrs = new int[meta.getArgument().length];
    for ( int i = 0; i < meta.getArgument().length; i++ ) {
      if ( !meta.getArgumentDirection()[i].equalsIgnoreCase( "OUT" ) ) { // IN or INOUT
        data.argnrs[i] = rowMeta.indexOfValue( meta.getArgument()[i] );
        if ( data.argnrs[i] < 0 ) {
          logError( BaseMessages.getString( PKG, "DBProc.Log.ErrorFindingField" ) + meta.getArgument()[i] + "]" );
          throw new KettleStepException( BaseMessages.getString( PKG, "DBProc.Exception.CouldnotFindField", meta
            .getArgument()[i] ) );
        }
      } else {
        // OUT arguments have no input field to read from.
        data.argnrs[i] = -1;
      }
    }

    data.db.setProcLookup( environmentSubstitute( meta.getProcedure() ), meta.getArgument(), meta
      .getArgumentDirection(), meta.getArgumentType(), meta.getResultName(), meta.getResultType() );
  }

  // Grow the row so the function result and OUT values fit after the input fields.
  Object[] outputRowData = RowDataUtil.resizeArray( rowData, data.outputMeta.size() );
  int outputIndex = rowMeta.size();

  data.db.setProcValues( rowMeta, rowData, data.argnrs, meta.getArgumentDirection(), !Utils.isEmpty( meta
    .getResultName() ) );

  RowMetaAndData add =
    data.db.callProcedure( meta.getArgument(), meta.getArgumentDirection(), meta.getArgumentType(), meta
      .getResultName(), meta.getResultType() );
  // addIndex walks the procedure's returned values in order.
  int addIndex = 0;

  // Function return?
  if ( !Utils.isEmpty( meta.getResultName() ) ) {
    outputRowData[outputIndex++] = add.getData()[addIndex++]; // first is the function return
  }

  // We are only expecting the OUT and INOUT arguments here.
  // The INOUT values need to replace the value with the same name in the row.
  //
  for ( int i = 0; i < data.argnrs.length; i++ ) {
    if ( meta.getArgumentDirection()[i].equalsIgnoreCase( "OUT" ) ) {
      // add
      outputRowData[outputIndex++] = add.getData()[addIndex++];
    } else if ( meta.getArgumentDirection()[i].equalsIgnoreCase( "INOUT" ) ) {
      // replace
      outputRowData[data.argnrs[i]] = add.getData()[addIndex];
      addIndex++;
    }
    // IN not taken
  }
  return outputRowData;
}