Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#convertToNormalStorageType()

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#convertToNormalStorageType(). This method converts a value held in an alternate storage type (most commonly the byte[]-based binary-string storage produced by lazy conversion in input steps) into its normal, native Java representation. You can go to the original project or source file by following the links above each example.
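
Before the examples from real steps, here is a minimal, self-contained sketch of what the method does. The field name and sample value are made up for illustration; it assumes only pentaho-kettle core on the classpath.

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class ConvertToNormalStorageDemo {
  public static void main( String[] args ) throws KettleValueException {
    // A String field whose data is still in binary-string (lazy) storage,
    // as produced e.g. by an input step with lazy conversion enabled
    ValueMetaInterface valueMeta = new ValueMetaString( "city" );
    valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );

    Object lazyValue = "Orlando".getBytes(); // raw bytes, not yet a String

    // Decode the byte[] back to the native type (String in this case)
    Object normalValue = valueMeta.convertToNormalStorageType( lazyValue );

    System.out.println( normalValue );            // Orlando
    System.out.println( normalValue.getClass() ); // class java.lang.String
  }
}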
Example 1
Source File: GroupBy.java    From pentaho-kettle with Apache License 2.0
private void addCumulativeSums( Object[] row ) throws KettleValueException {

    // We need to adjust this row with cumulative sums
    //
    for ( int i = 0; i < data.cumulativeSumSourceIndexes.size(); i++ ) {
      int sourceIndex = data.cumulativeSumSourceIndexes.get( i );
      Object previousTarget = data.previousSums[ i ];
      Object sourceValue = row[ sourceIndex ];

      int targetIndex = data.cumulativeSumTargetIndexes.get( i );

      ValueMetaInterface sourceMeta = data.inputRowMeta.getValueMeta( sourceIndex );
      ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta( targetIndex );

      // If the first values were null, or this is the first time around, just take the source value...
      //
      if ( targetMeta.isNull( previousTarget ) ) {
        row[ targetIndex ] = sourceMeta.convertToNormalStorageType( sourceValue );
      } else {
        // If the source value is null, just take the previous target value
        //
        if ( sourceMeta.isNull( sourceValue ) ) {
          row[ targetIndex ] = previousTarget;
        } else {
          row[ targetIndex ] = ValueDataUtil.plus( targetMeta, data.previousSums[ i ], sourceMeta, row[ sourceIndex ] );
        }
      }
      data.previousSums[ i ] = row[ targetIndex ];
    }

  }
 
Example 2
Source File: MemoryGroupByData.java    From pentaho-kettle with Apache License 2.0
private Object[] getHashValue() throws KettleValueException {
  Object[] groupDataHash = new Object[groupMeta.size()];
  for ( int i = 0; i < groupMeta.size(); i++ ) {
    ValueMetaInterface valueMeta = groupMeta.getValueMeta( i );
    groupDataHash[i] = valueMeta.convertToNormalStorageType( groupData[i] );
  }
  return groupDataHash;
}
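
The conversion in getHashValue() matters because lazily converted values are byte[] arrays, and Java arrays compare by identity rather than by content, so two equal group keys would never match as hash keys. A quick illustration of the pitfall (plain Java, no Kettle classes involved):

public class ArrayKeyPitfall {
  public static void main( String[] args ) {
    byte[] a = "group-1".getBytes();
    byte[] b = "group-1".getBytes();
    System.out.println( a.equals( b ) );                              // false: arrays compare by identity
    System.out.println( new String( a ).equals( new String( b ) ) ); // true once decoded to normal storage
  }
}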
 
Example 3
Source File: SalesforceStep.java    From pentaho-kettle with Apache License 2.0
/**
 * Normalize a value before sending it to Salesforce.
 *
 * @param valueMeta value metadata
 * @param value Pentaho internal value object
 * @return object suitable for sending to Salesforce
 * @throws KettleValueException
 */
public Object normalizeValue( ValueMetaInterface valueMeta, Object value ) throws KettleValueException {
  if ( valueMeta.isDate() ) {
    // Pass date field converted to UTC, see PDI-10836
    Calendar cal = Calendar.getInstance( valueMeta.getDateFormatTimeZone() );
    cal.setTime( valueMeta.getDate( value ) );
    Calendar utc = Calendar.getInstance( TimeZone.getTimeZone( "UTC" ) );
    // Reset time-related fields
    utc.clear();
    utc.set( cal.get( Calendar.YEAR ), cal.get( Calendar.MONTH ), cal.get( Calendar.DATE ),
      cal.get( Calendar.HOUR_OF_DAY ), cal.get( Calendar.MINUTE ), cal.get( Calendar.SECOND ) );
    value = utc;
  } else if ( valueMeta.isStorageBinaryString() ) {
    value = valueMeta.convertToNormalStorageType( value );
  }

  if ( ValueMetaInterface.TYPE_INTEGER == valueMeta.getType() ) {
    // Salesforce integer values can only be http://www.w3.org/2001/XMLSchema:int,
    // see org.pentaho.di.ui.trans.steps.salesforceinput.SalesforceInputDialog#addFieldToTable.
    // So we need to convert the Hitachi Vantara integer (a Java Long under the hood) to a real int;
    // it will then be sent correctly as http://www.w3.org/2001/XMLSchema:int.

    // Use Guava's checked cast to fail fast instead of silently losing data
    value = Ints.checkedCast( (Long) value );
  }
  return value;
}
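
Two details worth noting in normalizeValue(): utc.clear() followed by utc.set(...) leaves the MILLISECOND field at zero, so sub-second precision is intentionally dropped before the date goes to Salesforce, and Ints.checkedCast() (from Guava's com.google.common.primitives.Ints) throws IllegalArgumentException rather than silently truncating when the Long does not fit in an int.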
 
Example 4
Source File: JavaFilter.java    From pentaho-kettle with Apache License 2.0
private boolean calcFields( RowMetaInterface rowMeta, Object[] r ) throws KettleValueException {
  try {
    // Initialize evaluators etc. Only do it once.
    //
    if ( data.expressionEvaluator == null ) {
      String realCondition = environmentSubstitute( meta.getCondition() );
      data.argumentIndexes = new ArrayList<Integer>();

      List<String> parameterNames = new ArrayList<String>();
      List<Class<?>> parameterTypes = new ArrayList<Class<?>>();

      for ( int i = 0; i < data.outputRowMeta.size(); i++ ) {

        ValueMetaInterface valueMeta = data.outputRowMeta.getValueMeta( i );

        // See if the value is being used in a formula...
        //
        if ( realCondition.contains( valueMeta.getName() ) ) {
          // If so, add it to the indexes...
          data.argumentIndexes.add( i );

          Class<?> parameterType;
          switch ( valueMeta.getType() ) {
            case ValueMetaInterface.TYPE_STRING:
              parameterType = String.class;
              break;
            case ValueMetaInterface.TYPE_NUMBER:
              parameterType = Double.class;
              break;
            case ValueMetaInterface.TYPE_INTEGER:
              parameterType = Long.class;
              break;
            case ValueMetaInterface.TYPE_DATE:
              parameterType = Date.class;
              break;
            case ValueMetaInterface.TYPE_BIGNUMBER:
              parameterType = BigDecimal.class;
              break;
            case ValueMetaInterface.TYPE_BOOLEAN:
              parameterType = Boolean.class;
              break;
            case ValueMetaInterface.TYPE_BINARY:
              parameterType = byte[].class;
              break;
            default:
              parameterType = String.class;
              break;
          }
          parameterTypes.add( parameterType );
          parameterNames.add( valueMeta.getName() );
        }
      }

      // Create the expression evaluator: is relatively slow so we do it only for the first row...
      //
      data.expressionEvaluator = new ExpressionEvaluator();
      data.expressionEvaluator.setParameters(
        parameterNames.toArray( new String[parameterNames.size()] ), parameterTypes
          .toArray( new Class<?>[parameterTypes.size()] ) );
      data.expressionEvaluator.setReturnType( Object.class );
      data.expressionEvaluator.setThrownExceptions( new Class<?>[] { Exception.class } );
      data.expressionEvaluator.cook( realCondition );

      // Also create the argument data structure once...
      //
      data.argumentData = new Object[data.argumentIndexes.size()];
    }

    // This method can only accept the specified number of values...
    //
    for ( int x = 0; x < data.argumentIndexes.size(); x++ ) {
      int index = data.argumentIndexes.get( x );
      ValueMetaInterface outputValueMeta = data.outputRowMeta.getValueMeta( index );
      data.argumentData[x] = outputValueMeta.convertToNormalStorageType( r[index] );
    }

    Object formulaResult = data.expressionEvaluator.evaluate( data.argumentData );

    if ( formulaResult instanceof Boolean ) {
      return (Boolean) formulaResult;
    } else {
      throw new KettleException( "The result of the filter expression must be a boolean and we got back : "
        + formulaResult.getClass().getName() );
    }
  } catch ( Exception e ) {
    throw new KettleValueException( e );
  }
}
 
Example 5
Source File: GroupBy.java    From pentaho-kettle with Apache License 2.0
private void addCumulativeAverages( Object[] row ) throws KettleValueException {

    // We need to adjust this row with cumulative averages
    //
    for ( int i = 0; i < data.cumulativeAvgSourceIndexes.size(); i++ ) {
      int sourceIndex = data.cumulativeAvgSourceIndexes.get( i );
      Object previousTarget = data.previousAvgSum[ i ];
      Object sourceValue = row[ sourceIndex ];

      int targetIndex = data.cumulativeAvgTargetIndexes.get( i );

      ValueMetaInterface sourceMeta = data.inputRowMeta.getValueMeta( sourceIndex );
      ValueMetaInterface targetMeta = data.outputRowMeta.getValueMeta( targetIndex );

      // If the first values were null, or this is the first time around, just take the source value...
      //
      Object sum = null;

      if ( targetMeta.isNull( previousTarget ) ) {
        sum = sourceMeta.convertToNormalStorageType( sourceValue );
      } else {
        // If the source value is null, just take the previous target value
        //
        if ( sourceMeta.isNull( sourceValue ) ) {
          sum = previousTarget;
        } else {
          if ( sourceMeta.isInteger() ) {
            sum = ValueDataUtil.plus( data.valueMetaInteger, data.previousAvgSum[ i ], sourceMeta, row[ sourceIndex ] );
          } else {
            sum = ValueDataUtil.plus( targetMeta, data.previousAvgSum[ i ], sourceMeta, row[ sourceIndex ] );
          }
        }
      }
      data.previousAvgSum[ i ] = sum;

      if ( !sourceMeta.isNull( sourceValue ) ) {
        data.previousAvgCount[ i ]++;
      }

      if ( sourceMeta.isInteger() ) {
        // For integer sources the average is returned as a Number (Double) as the exception
        //
        if ( sum == null ) {
          row[ targetIndex ] = null;
        } else {
          row[ targetIndex ] = ( (Long) sum ).doubleValue() / data.previousAvgCount[ i ];
        }
      } else {
        row[ targetIndex ] = ValueDataUtil.divide( targetMeta, sum, data.valueMetaInteger, data.previousAvgCount[ i ] );
      }
    }

  }
 
Example 6
Source File: StreamLookup.java    From pentaho-kettle with Apache License 2.0
private boolean readLookupValues() throws KettleException {
  data.infoStream = meta.getStepIOMeta().getInfoStreams().get( 0 );
  if ( data.infoStream.getStepMeta() == null ) {
    logError( BaseMessages.getString( PKG, "StreamLookup.Log.NoLookupStepSpecified" ) );
    return false;
  }
  if ( log.isDetailed() ) {
    logDetailed( BaseMessages.getString( PKG, "StreamLookup.Log.ReadingFromStream" )
      + data.infoStream.getStepname() + "]" );
  }

  int[] keyNrs = new int[meta.getKeylookup().length];
  int[] valueNrs = new int[meta.getValue().length];
  boolean firstRun = true;

  // Which row set do we read from?
  //
  RowSet rowSet = findInputRowSet( data.infoStream.getStepname() );
  Object[] rowData = getRowFrom( rowSet ); // rows are originating from "lookup_from"
  while ( rowData != null ) {
    if ( log.isRowLevel() ) {
      logRowlevel( BaseMessages.getString( PKG, "StreamLookup.Log.ReadLookupRow" )
        + rowSet.getRowMeta().getString( rowData ) );
    }

    if ( firstRun ) {
      firstRun = false;
      data.hasLookupRows = true;

      data.infoMeta = rowSet.getRowMeta().clone();
      RowMetaInterface cacheKeyMeta = new RowMeta();
      RowMetaInterface cacheValueMeta = new RowMeta();

      // Look up the keys in the source rows
      for ( int i = 0; i < meta.getKeylookup().length; i++ ) {
        keyNrs[i] = rowSet.getRowMeta().indexOfValue( meta.getKeylookup()[i] );
        if ( keyNrs[i] < 0 ) {
          throw new KettleStepException( BaseMessages.getString(
            PKG, "StreamLookup.Exception.UnableToFindField", meta.getKeylookup()[i] ) );
        }
        cacheKeyMeta.addValueMeta( rowSet.getRowMeta().getValueMeta( keyNrs[i] ) );
      }
      // Save the data types of the keys to optionally convert input rows later on...
      if ( data.keyTypes == null ) {
        data.keyTypes = cacheKeyMeta.clone();
      }

      // Cache keys are stored as normal types, not binary
      for ( int i = 0; i < keyNrs.length; i++ ) {
        cacheKeyMeta.getValueMeta( i ).setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
      }

      for ( int v = 0; v < meta.getValue().length; v++ ) {
        valueNrs[v] = rowSet.getRowMeta().indexOfValue( meta.getValue()[v] );
        if ( valueNrs[v] < 0 ) {
          throw new KettleStepException( BaseMessages.getString(
            PKG, "StreamLookup.Exception.UnableToFindField", meta.getValue()[v] ) );
        }
        cacheValueMeta.addValueMeta( rowSet.getRowMeta().getValueMeta( valueNrs[v] ) );
      }

      data.cacheKeyMeta = cacheKeyMeta;
      data.cacheValueMeta = cacheValueMeta;
    }

    Object[] keyData = new Object[keyNrs.length];
    for ( int i = 0; i < keyNrs.length; i++ ) {
      ValueMetaInterface keyMeta = data.keyTypes.getValueMeta( i );
      // Convert keys to normal storage type
      keyData[i] = keyMeta.convertToNormalStorageType( rowData[keyNrs[i]] );
    }

    Object[] valueData = new Object[valueNrs.length];
    for ( int i = 0; i < valueNrs.length; i++ ) {
      // Store value as is, avoid preliminary binary->normal storage type conversion
      valueData[i] = rowData[valueNrs[i]];
    }

    addToCache( data.cacheKeyMeta, keyData, data.cacheValueMeta, valueData );

    rowData = getRowFrom( rowSet );
  }

  return true;
}
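
Note the asymmetry in this last example: keys are converted to normal storage up front (and their cache metadata forced to STORAGE_TYPE_NORMAL) because they must be compared on every lookup, while values are cached as-is so that any binary-to-normal conversion cost is deferred until a value is actually emitted.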