Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#toStringMeta()

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#toStringMeta(). Each example is taken verbatim from the project named in the Source File line above it.
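Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the class and field names are invented for illustration) of what toStringMeta() is typically used for: it renders a value's metadata (its type plus length, precision, and storage details) as a short, human-readable string, which the examples below feed into impact reports and error messages.

import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class ToStringMetaSketch {
  public static void main( String[] args ) {
    // Invented field metadata, used only to illustrate the call.
    ValueMetaInterface nameMeta = new ValueMetaString( "CUSTOMER_NAME" );
    nameMeta.setLength( 50 );

    // toStringMeta() describes the metadata, not the data: expect output
    // along the lines of "Type = String(50)" (the exact format depends on
    // the value type and its storage settings).
    System.out.println( "Type = " + nameMeta.toStringMeta() );
  }
}
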
Example 1
Source File: PGBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 2
Source File: DeleteMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    // Lookup: we do a lookup on the natural keys
    for ( int i = 0; i < keyLookup.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( keyStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_DELETE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), tableName, keyLookup[i], keyStream[i],
          v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 3
Source File: MonetDBBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
          new DatabaseImpact(
              DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
              .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 4
Source File: OraBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 5
Source File: MySQLBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
          new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
              databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 6
Source File: ValueMetaInternetAddress.java    From pentaho-kettle with Apache License 2.0
/**
 * Convert the specified data to the data type specified in this object.
 *
 * @param meta2
 *          the metadata of the object to be converted
 * @param data2
 *          the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException
 *           in case there is a data conversion error
 */
@Override
public Object convertData( ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  switch ( meta2.getType() ) {
    case TYPE_STRING:
      return convertStringToInternetAddress( meta2.getString( data2 ) );
    case TYPE_INTEGER:
      return convertIntegerToInternetAddress( meta2.getInteger( data2 ) );
    case TYPE_NUMBER:
      return convertNumberToInternetAddress( meta2.getNumber( data2 ) );
    case TYPE_BIGNUMBER:
      return convertBigNumberToInternetAddress( meta2.getBigNumber( data2 ) );
    default:
      throw new KettleValueException( meta2.toStringMeta() + " : can't be converted to an Internet Address" );
  }
}
 
Example 7
Source File: ValueMetaTimestamp.java    From pentaho-kettle with Apache License 2.0
/**
 * Convert the specified data to the data type specified in this object.
 *
 * @param meta2 the metadata of the object to be converted
 * @param data2 the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException in case there is a data conversion error
 */
@Override
public Object convertData( ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  switch ( meta2.getType() ) {
    case TYPE_TIMESTAMP:
      return ( (ValueMetaTimestamp) meta2 ).getTimestamp( data2 );
    case TYPE_STRING:
      return convertStringToTimestamp( meta2.getString( data2 ) );
    case TYPE_INTEGER:
      return convertIntegerToTimestamp( meta2.getInteger( data2 ) );
    case TYPE_NUMBER:
      return convertNumberToTimestamp( meta2.getNumber( data2 ) );
    case TYPE_DATE:
      return convertDateToTimestamp( meta2.getDate( data2 ) );
    case TYPE_BIGNUMBER:
      return convertBigNumberToTimestamp( meta2.getBigNumber( data2 ) );
    default:
      throw new KettleValueException( meta2.toStringMeta() + " : can't be converted to a timestamp" );
  }
}
 
Example 8
Source File: GPLoadMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
    RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
    IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
          new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(),
              databaseMeta.getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
              fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 9
Source File: LucidDBBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 10
Source File: GPBulkLoaderMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta,
  RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository,
  IMetaStore metaStore ) throws KettleStepException {
  if ( prev != null ) {
    /* DEBUG CHECK THIS */
    // Insert dateMask fields : read/write
    for ( int i = 0; i < fieldTable.length; i++ ) {
      ValueMetaInterface v = prev.searchValueMeta( fieldStream[i] );

      DatabaseImpact ii =
        new DatabaseImpact(
          DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(), stepMeta.getName(), databaseMeta
            .getDatabaseName(), transMeta.environmentSubstitute( tableName ), fieldTable[i],
          fieldStream[i], v != null ? v.getOrigin() : "?", "", "Type = " + v.toStringMeta() );
      impact.add( ii );
    }
  }
}
 
Example 11
Source File: StringToKettleFn.java    From kettle-beam with Apache License 2.0
@ProcessElement
public void processElement( ProcessContext processContext ) {

  try {

    String inputString = processContext.element();
    inputCounter.inc();

    String[] components = inputString.split( separator, -1 );

    // TODO: implement enclosure in FileDefinition
    //

    Object[] row = RowDataUtil.allocateRowData( rowMeta.size() );
    int index = 0;
    while ( index < rowMeta.size() && index < components.length ) {
      String sourceString = components[ index ];
      ValueMetaInterface valueMeta = rowMeta.getValueMeta( index );
      ValueMetaInterface stringMeta = new ValueMetaString( "SourceString" );
      stringMeta.setConversionMask( valueMeta.getConversionMask() );
      try {
        row[ index ] = valueMeta.convertDataFromString( sourceString, stringMeta, null, null, ValueMetaInterface.TRIM_TYPE_NONE );
      } catch ( KettleValueException ve ) {
        throw new KettleException( "Unable to convert value '" + sourceString + "' to value : " + valueMeta.toStringMeta(), ve );
      }
      index++;
    }

    // Pass the row to the process context
    //
    processContext.output( new KettleRow( row ) );
    writtenCounter.inc();

  } catch ( Exception e ) {
    Metrics.counter( "error", stepname ).inc();
    LOG.error( "Error converting input data into Kettle rows " + processContext.element() + ", " + e.getMessage() );
    throw new RuntimeException( "Error converting input data into Kettle rows", e );

  }
}
 
Example 12
Source File: Vertica5DatabaseMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * This method allows a database dialect to convert database specific data types to Kettle data types.
 *
 * @param rs
 *          The result set to use
 * @param val
 *          The description of the value to retrieve
 * @param index
 *          the index on which we need to retrieve the value, 0-based.
 * @return The correctly converted Kettle data type corresponding to the valueMeta description.
 * @throws KettleDatabaseException
 */
@Override
public Object getValueFromResultSet( ResultSet rs, ValueMetaInterface val, int index ) throws KettleDatabaseException {
  Object data;

  try {
    switch ( val.getType() ) {
      case ValueMetaInterface.TYPE_TIMESTAMP:
      case ValueMetaInterface.TYPE_DATE:
        if ( val.getOriginalColumnType() == java.sql.Types.TIMESTAMP ) {
          data = rs.getTimestamp( index + 1 );
          break; // Timestamp extends java.util.Date
        } else if ( val.getOriginalColumnType() == java.sql.Types.TIME ) {
          data = rs.getTime( index + 1 );
          break;
        } else {
          data = rs.getDate( index + 1 );
          break;
        }
      default:
        return super.getValueFromResultSet( rs, val, index );
    }
    if ( rs.wasNull() ) {
      data = null;
    }
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Unable to get value '"
      + val.toStringMeta() + "' from database resultset, index " + index, e );
  }

  return data;
}
 
Example 13
Source File: ValueMetaBase.java    From pentaho-kettle with Apache License 2.0
/**
 * Compare 2 values of the same data type
 *
 * @param data1
 *          the first value
 * @param meta2
 *          the second value's metadata
 * @param data2
 *          the second value
 * @return 0 if the values are equal, -1 if data1 is smaller than data2 and +1 if it's larger.
 * @throws KettleValueException
 *           In case we get conversion errors
 */
@Override
public int compare( Object data1, ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  if ( meta2 == null ) {
    throw new KettleValueException( toStringMeta()
        + " : Second meta data (meta2) is null, please check one of the previous steps." );
  }

  try {
    // Before we can compare data1 to data2 we need to make sure they have the
    // same data type etc.
    //
    if ( getType() == meta2.getType() ) {
      if ( getStorageType() == meta2.getStorageType() ) {
        return compare( data1, data2 );
      }

      // Convert the storage type to compare the data.
      //
      switch ( getStorageType() ) {
        case STORAGE_TYPE_NORMAL:
          return compare( data1, meta2.convertToNormalStorageType( data2 ) );
        case STORAGE_TYPE_BINARY_STRING:
          if ( storageMetadata != null && storageMetadata.getConversionMask() != null && !meta2.isNumber() ) {
            // BACKLOG-18754 - if there is a storage conversion mask, we should use
            // it as the mask for meta2 (meta2 can have specific storage type and type, so
            // it can't be used directly to convert data2 to binary string)
            ValueMetaInterface meta2StorageMask = meta2.clone();
            meta2StorageMask.setConversionMask( storageMetadata.getConversionMask() );
            return compare( data1, meta2StorageMask.convertToBinaryStringStorageType( data2 ) );
          } else {
            return compare( data1, meta2.convertToBinaryStringStorageType( data2 ) );
          }
        case STORAGE_TYPE_INDEXED:
          switch ( meta2.getStorageType() ) {
            case STORAGE_TYPE_INDEXED:
              return compare( data1, data2 ); // not accessible, just to make sure.
            case STORAGE_TYPE_NORMAL:
              return -meta2.compare( data2, convertToNormalStorageType( data1 ) );
            case STORAGE_TYPE_BINARY_STRING:
              return -meta2.compare( data2, convertToBinaryStringStorageType( data1 ) );
            default:
              throw new KettleValueException( meta2.toStringMeta() + " : Unknown storage type : "
                  + meta2.getStorageType() );

          }
        default:
          throw new KettleValueException( toStringMeta() + " : Unknown storage type : " + getStorageType() );
      }
    } else if ( ValueMetaInterface.TYPE_INTEGER == getType() && ValueMetaInterface.TYPE_NUMBER == meta2.getType() ) {
      // BACKLOG-18738
      // compare Double to Integer
      return -meta2.compare( data2, meta2.convertData( this, data1 ) );
    }

    // If the data types are not the same, the first one is the driver...
    // The second data type is converted to the first one.
    //
    return compare( data1, convertData( meta2, data2 ) );
  } catch ( Exception e ) {
    throw new KettleValueException(
        toStringMeta() + " : Unable to compare with value [" + meta2.toStringMeta() + "]", e );
  }
}
 
Example 14
Source File: NeoviewDatabaseMeta.java    From pentaho-kettle with Apache License 2.0
/**
 * This method allows a database dialect to convert database specific data types to Kettle data types.
 *
 * @param rs
 *          The result set to use
 * @param val
 *          The description of the value to retrieve
 * @param i
 *          the index on which we need to retrieve the value, 0-based.
 * @return The correctly converted Kettle data type corresponding to the valueMeta description.
 * @throws KettleDatabaseException
 */
@Override
public Object getValueFromResultSet( ResultSet rs, ValueMetaInterface val, int i ) throws KettleDatabaseException {
  Object data = null;

  try {
    switch ( val.getType() ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
        data = Boolean.valueOf( rs.getBoolean( i + 1 ) );
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        data = new Double( rs.getDouble( i + 1 ) );
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        data = rs.getBigDecimal( i + 1 );
        break;
      case ValueMetaInterface.TYPE_INTEGER:
        data = Long.valueOf( rs.getLong( i + 1 ) );
        break;
      case ValueMetaInterface.TYPE_STRING:
        if ( val.isStorageBinaryString() ) {
          data = rs.getBytes( i + 1 );
        } else {
          data = rs.getString( i + 1 );
        }
        break;
      case ValueMetaInterface.TYPE_BINARY:
        if ( supportsGetBlob() ) {
          Blob blob = rs.getBlob( i + 1 );
          if ( blob != null ) {
            data = blob.getBytes( 1L, (int) blob.length() );
          } else {
            data = null;
          }
        } else {
          data = rs.getBytes( i + 1 );
        }
        break;
      case ValueMetaInterface.TYPE_TIMESTAMP:
      case ValueMetaInterface.TYPE_DATE:
        if ( val.getOriginalColumnType() == java.sql.Types.TIME ) {
          // Neoview can not handle getDate / getTimestamp for a Time column
          data = rs.getTime( i + 1 );
          break; // Time is a subclass of java.util.Date, the default date
                 // will be 1970-01-01
        } else if ( val.getPrecision() != 1 && supportsTimeStampToDateConversion() ) {
          data = rs.getTimestamp( i + 1 );
          break; // Timestamp extends java.util.Date
        } else {
          data = rs.getDate( i + 1 );
          break;
        }
      default:
        break;
    }
    if ( rs.wasNull() ) {
      data = null;
    }
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Unable to get value '"
      + val.toStringMeta() + "' from database resultset, index " + i, e );
  }

  return data;
}