Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#getBinary()

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#getBinary(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: RowForumulaContext.java — from pentaho-kettle (Apache License 2.0), 6 votes
/**
 * Returns the native Java object backing a Kettle value, using the accessor
 * that corresponds to the value's declared type.
 *
 * @param valueMeta metadata describing the value's type
 * @param valueData the raw row data to convert
 * @return the converted value, or {@code null} for types without a mapping
 *         (e.g. serializable values)
 * @throws KettleValueException if the underlying conversion fails
 */
public static Object getPrimitive( ValueMetaInterface valueMeta, Object valueData ) throws KettleValueException {
  final int type = valueMeta.getType();
  if ( type == ValueMetaInterface.TYPE_BIGNUMBER ) {
    return valueMeta.getBigNumber( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_BINARY ) {
    return valueMeta.getBinary( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_BOOLEAN ) {
    return valueMeta.getBoolean( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_DATE ) {
    return valueMeta.getDate( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_INTEGER ) {
    return valueMeta.getInteger( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_NUMBER ) {
    return valueMeta.getNumber( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_STRING ) {
    return valueMeta.getString( valueData );
  }
  // TYPE_SERIALIZABLE and any unknown type have no primitive representation.
  return null;
}
 
Example 2
Source File: ValueMetaBase.java — from pentaho-kettle (Apache License 2.0), 6 votes
/**
 * Convert the specified data to the data type specified in this object.
 *
 * @param meta2
 *          the metadata of the object to be converted
 * @param data2
 *          the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException
 *           in case there is a data conversion error
 */
@Override
/**
 * Converts the given data to the data type of this value metadata object,
 * delegating the actual conversion to the source metadata's typed accessors.
 *
 * @param meta2 the metadata of the object to be converted
 * @param data2 the data of the object to be converted
 * @return the object converted to this value metadata's data type
 * @throws KettleValueException in case there is a data conversion error
 */
@Override
public Object convertData( ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  final int targetType = getType();
  if ( targetType == TYPE_NONE || targetType == TYPE_STRING ) {
    return meta2.getString( data2 );
  }
  if ( targetType == TYPE_NUMBER ) {
    return meta2.getNumber( data2 );
  }
  if ( targetType == TYPE_INTEGER ) {
    return meta2.getInteger( data2 );
  }
  if ( targetType == TYPE_DATE ) {
    return meta2.getDate( data2 );
  }
  if ( targetType == TYPE_BIGNUMBER ) {
    return meta2.getBigNumber( data2 );
  }
  if ( targetType == TYPE_BOOLEAN ) {
    return meta2.getBoolean( data2 );
  }
  if ( targetType == TYPE_BINARY ) {
    return meta2.getBinary( data2 );
  }
  throw new KettleValueException( toString() + " : I can't convert the specified value to data type : "
      + getType() );
}
 
Example 3
Source File: ValueMetaBase.java — from pentaho-kettle (Apache License 2.0), 6 votes
/**
 * Convert the specified data to the data type specified in this object. For String conversion, be compatible with
 * version 2.5.2.
 *
 * @param meta2
 *          the metadata of the object to be converted
 * @param data2
 *          the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException
 *           in case there is a data conversion error
 */
@Override
/**
 * Converts the given data to the data type of this value metadata object.
 * String conversion stays compatible with version 2.5.2 by using
 * {@code getCompatibleString} instead of {@code getString}.
 *
 * @param meta2 the metadata of the object to be converted
 * @param data2 the data of the object to be converted
 * @return the object converted to this value metadata's data type
 * @throws KettleValueException in case there is a data conversion error
 */
@Override
public Object convertDataCompatible( ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  final int targetType = getType();
  if ( targetType == TYPE_STRING ) {
    return meta2.getCompatibleString( data2 );
  }
  if ( targetType == TYPE_NUMBER ) {
    return meta2.getNumber( data2 );
  }
  if ( targetType == TYPE_INTEGER ) {
    return meta2.getInteger( data2 );
  }
  if ( targetType == TYPE_DATE ) {
    return meta2.getDate( data2 );
  }
  if ( targetType == TYPE_BIGNUMBER ) {
    return meta2.getBigNumber( data2 );
  }
  if ( targetType == TYPE_BOOLEAN ) {
    return meta2.getBoolean( data2 );
  }
  if ( targetType == TYPE_BINARY ) {
    return meta2.getBinary( data2 );
  }
  throw new KettleValueException( toString() + " : I can't convert the specified value to data type : "
      + getType() );
}
 
Example 4
Source File: KettleTypeToBytesWritableConverter.java — from pentaho-hadoop-shims (Apache License 2.0), 5 votes
@Override
public BytesWritable convert( ValueMetaInterface meta, Object obj ) throws TypeConversionException {
  try {
    BytesWritable result = new BytesWritable();
    byte[] binary = meta.getBinary( obj );
    result.set( binary, 0, binary.length );
    return result;
  } catch ( Exception ex ) {
    throw new TypeConversionException( BaseMessages
      .getString( TypeConverterFactory.class, "ErrorConverting", BytesWritable.class.getSimpleName(), obj ), ex );
  }
}
 
Example 5
Source File: HBaseValueMeta.java — from pentaho-hadoop-shims (Apache License 2.0), 4 votes
/**
 * Encode a keyValue (with associated meta data) to an array of bytes with respect to the key type specified in a
 * mapping.
 *
 * @param keyValue the key value (object) to encode
 * @param keyMeta  meta data about the key value
 * @param keyType  the target type of the encoded key value
 * @return the key encoded as an array of bytes
 * @throws KettleException if something goes wrong
 */
/**
 * Encode a keyValue (with associated meta data) to an array of bytes with respect to the key type specified in a
 * mapping.
 *
 * @param keyValue the key value (object) to encode
 * @param keyMeta  meta data about the key value
 * @param keyType  the target type of the encoded key value
 * @return the key encoded as an array of bytes
 * @throws KettleException if the key type is unknown, or an unsigned type receives a negative value
 */
public static byte[] encodeKeyValue( Object keyValue,
                                     ValueMetaInterface keyMeta, Mapping.KeyType keyType,
                                     HBaseBytesUtilShim bytesUtil ) throws KettleException {

  byte[] result = null;

  switch ( keyType ) {
    case STRING:
      String stringKey = keyMeta.getString( keyValue );
      result = encodeKeyValue( stringKey, keyType, bytesUtil );
      break;
    case DATE:
    case UNSIGNED_DATE:
      Date dateKey = keyMeta.getDate( keyValue );
      // Unsigned dates cannot represent instants before the epoch.
      if ( keyType == Mapping.KeyType.UNSIGNED_DATE && dateKey.getTime() < 0 ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnsignedDate" ) );
      }
      result = encodeKeyValue( dateKey, keyType, bytesUtil );
      break;
    case INTEGER:
    case UNSIGNED_INTEGER:
      int keyInt = keyMeta.getInteger( keyValue ).intValue();
      if ( keyType == Mapping.KeyType.UNSIGNED_INTEGER && keyInt < 0 ) {
        // NOTE(review): the resource key "UnsignedIngteger" is misspelled, but it is a
        // bundle lookup key — renaming it here would break the message lookup. Fix it
        // together with the properties file.
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnsignedIngteger" ) );
      }
      // Integer.valueOf instead of the deprecated new Integer(...) boxing constructor.
      result = encodeKeyValue( Integer.valueOf( keyInt ), keyType, bytesUtil );
      break;
    case LONG:
    case UNSIGNED_LONG:
      long keyLong = keyMeta.getInteger( keyValue ).longValue();
      if ( keyType == Mapping.KeyType.UNSIGNED_LONG && keyLong < 0 ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnsignedLong" ) );
      }
      // Long.valueOf instead of the deprecated new Long(...) boxing constructor.
      result = encodeKeyValue( Long.valueOf( keyLong ), keyType, bytesUtil );
      break;

    case BINARY:
      byte[] keyBinary = keyMeta.getBinary( keyValue );
      result = encodeKeyValue( keyBinary, keyType, bytesUtil );
      // break added: the last case previously fell through silently; explicit break
      // protects against a fall-through bug if another case is appended later.
      break;
  }

  // A null result means the key type was not handled above (or the delegate returned null).
  if ( result == null ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "HBaseValueMeta.Error.UnknownTableKeyType" ) );
  }

  return result;
}
 
Example 6
Source File: HBaseValueMeta.java — from pentaho-hadoop-shims (Apache License 2.0), 4 votes
/**
 * Encodes a column value to the byte representation dictated by the mapping's
 * column metadata.
 *
 * @param columnValue    the value (object) to encode
 * @param colMeta        Kettle metadata for the incoming value
 * @param mappingColMeta mapping metadata describing the target HBase column
 * @param bytesUtil      byte-conversion shim to use for encoding
 * @return the encoded column value
 * @throws KettleException if the column type is unknown or serialization fails
 */
public static byte[] encodeColumnValue( Object columnValue,
                                        ValueMetaInterface colMeta, HBaseValueMeta mappingColMeta,
                                        HBaseBytesUtilShim bytesUtil ) throws KettleException {

  byte[] encoded = null;
  final int mappedType = mappingColMeta.getType();
  if ( mappedType == TYPE_STRING ) {
    encoded = bytesUtil.toBytes( colMeta.getString( columnValue ) );
  } else if ( mappedType == TYPE_INTEGER ) {
    Long longValue = colMeta.getInteger( columnValue );
    // Width of the stored integer depends on the mapping (long vs int).
    encoded = mappingColMeta.getIsLongOrDouble()
      ? bytesUtil.toBytes( longValue.longValue() )
      : bytesUtil.toBytes( longValue.intValue() );
  } else if ( mappedType == TYPE_NUMBER ) {
    Double doubleValue = colMeta.getNumber( columnValue );
    // Width of the stored number depends on the mapping (double vs float).
    encoded = mappingColMeta.getIsLongOrDouble()
      ? bytesUtil.toBytes( doubleValue.doubleValue() )
      : bytesUtil.toBytes( doubleValue.floatValue() );
  } else if ( mappedType == TYPE_DATE ) {
    // Dates are stored as their epoch-millisecond timestamp.
    encoded = bytesUtil.toBytes( colMeta.getDate( columnValue ).getTime() );
  } else if ( mappedType == TYPE_BOOLEAN ) {
    // Booleans are stored as the single-character strings "Y" / "N".
    encoded = bytesUtil.toBytes( colMeta.getBoolean( columnValue ).booleanValue() ? "Y" : "N" );
  } else if ( mappedType == TYPE_BIGNUMBER ) {
    // Big numbers are stored via their decimal string representation.
    BigDecimal bigValue = colMeta.getBigNumber( columnValue );
    encoded = bytesUtil.toBytes( bigValue.toString() );
  } else if ( mappedType == TYPE_SERIALIZABLE ) {
    try {
      encoded = encodeObject( columnValue );
    } catch ( IOException e ) {
      throw new KettleException( BaseMessages.getString( PKG,
        "HBaseValueMeta.Error.UnableToSerialize", colMeta.getName() ), e );
    }
  } else if ( mappedType == TYPE_BINARY ) {
    encoded = colMeta.getBinary( columnValue );
  }

  if ( encoded == null ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "HBaseValueMeta.Error.UnknownTypeForColumn" ) );
  }

  return encoded;
}
 
Example 7
Source File: AccessOutputMeta.java — from pentaho-kettle (Apache License 2.0), 4 votes
/**
 * Converts a Kettle row into the native Java objects expected by the Access writer,
 * narrowing integers to Byte/Short where the declared field length allows.
 *
 * @param rowMeta metadata describing the row's values
 * @param rowData the raw row data
 * @return one converted object per row-meta entry; entries are null when the data
 *         or metadata is null, or when the type has no mapping
 * @throws KettleValueException in case a data conversion fails
 */
public static Object[] createObjectsForRow( RowMetaInterface rowMeta, Object[] rowData ) throws KettleValueException {
  Object[] values = new Object[rowMeta.size()];
  for ( int i = 0; i < rowMeta.size(); i++ ) {
    ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
    Object valueData = rowData[i];

    // Prevent a NullPointerException below
    if ( valueData == null || valueMeta == null ) {
      values[i] = null;
      continue;
    }

    int length = valueMeta.getLength();

    switch ( valueMeta.getType() ) {
      case ValueMetaInterface.TYPE_INTEGER:
        // Narrow to the smallest box that fits the declared length; valueOf
        // replaces the deprecated Byte/Short boxing constructors.
        if ( length < 3 ) {
          values[i] = Byte.valueOf( valueMeta.getInteger( valueData ).byteValue() );
        } else {
          if ( length < 5 ) {
            values[i] = Short.valueOf( valueMeta.getInteger( valueData ).shortValue() );
          } else {
            values[i] = valueMeta.getInteger( valueData );
          }
        }
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        values[i] = valueMeta.getNumber( valueData );
        break;
      case ValueMetaInterface.TYPE_DATE:
        values[i] = valueMeta.getDate( valueData );
        break;
      case ValueMetaInterface.TYPE_STRING:
        values[i] = valueMeta.getString( valueData );
        break;
      case ValueMetaInterface.TYPE_BINARY:
        values[i] = valueMeta.getBinary( valueData );
        break;
      case ValueMetaInterface.TYPE_BOOLEAN:
        values[i] = valueMeta.getBoolean( valueData );
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        // NOTE(review): this calls getNumber (Double), not getBigNumber — possibly
        // deliberate because Access has no arbitrary-precision type, but it loses
        // precision. Confirm before changing.
        values[i] = valueMeta.getNumber( valueData );
        break;
      default:
        // Unknown types leave the slot null.
        break;
    }
  }
  return values;
}
 
Example 8
Source File: AddXML.java — from pentaho-kettle (Apache License 2.0), 4 votes
/**
 * Renders a single row value as the string to be written into the XML output,
 * applying the field's number/date formatting options.
 *
 * Dead code removed relative to the original: once the leading guards have run,
 * {@code field} is known non-null and {@code valueMeta.isNull(valueData)} is known
 * false, so the repeated checks further down could never fire.
 *
 * @param valueMeta metadata for the value; null is treated as a null value
 * @param valueData the raw value data
 * @param field     the XML field carrying format and null-representation options
 * @return the formatted string (never null); "" when field is null
 * @throws KettleValueException in case a data conversion fails
 */
private String formatField( ValueMetaInterface valueMeta, Object valueData, XMLField field )
  throws KettleValueException {
  if ( field == null ) {
    return "";
  }

  // Null values render as the field's configured null-string (or "").
  if ( valueMeta == null || valueMeta.isNull( valueData ) ) {
    String defaultNullValue = field.getNullString();
    return Utils.isEmpty( defaultNullValue ) ? "" : defaultNullValue;
  }

  String retval = "";
  if ( valueMeta.isNumeric() ) {
    // Formatting
    if ( !Utils.isEmpty( field.getFormat() ) ) {
      data.df.applyPattern( field.getFormat() );
    } else {
      data.df.applyPattern( data.defaultDecimalFormat.toPattern() );
    }
    // Decimal
    if ( !Utils.isEmpty( field.getDecimalSymbol() ) ) {
      data.dfs.setDecimalSeparator( field.getDecimalSymbol().charAt( 0 ) );
    } else {
      data.dfs.setDecimalSeparator( data.defaultDecimalFormatSymbols.getDecimalSeparator() );
    }
    // Grouping
    if ( !Utils.isEmpty( field.getGroupingSymbol() ) ) {
      data.dfs.setGroupingSeparator( field.getGroupingSymbol().charAt( 0 ) );
    } else {
      data.dfs.setGroupingSeparator( data.defaultDecimalFormatSymbols.getGroupingSeparator() );
    }
    // Currency symbol
    if ( !Utils.isEmpty( field.getCurrencySymbol() ) ) {
      data.dfs.setCurrencySymbol( field.getCurrencySymbol() );
    } else {
      data.dfs.setCurrencySymbol( data.defaultDecimalFormatSymbols.getCurrencySymbol() );
    }

    data.df.setDecimalFormatSymbols( data.dfs );

    if ( valueMeta.isBigNumber() ) {
      retval = data.df.format( valueMeta.getBigNumber( valueData ) );
    } else if ( valueMeta.isNumber() ) {
      retval = data.df.format( valueMeta.getNumber( valueData ) );
    } else {
      // Integer
      retval = data.df.format( valueMeta.getInteger( valueData ) );
    }
  } else if ( valueMeta.isDate() ) {
    if ( !Utils.isEmpty( field.getFormat() ) && valueMeta.getDate( valueData ) != null ) {
      // The condition above guarantees a non-empty format, so apply it directly
      // (the original re-checked the format and carried an unreachable else-branch).
      data.daf.applyPattern( field.getFormat() );
      data.daf.setDateFormatSymbols( data.dafs );
      retval = data.daf.format( valueMeta.getDate( valueData ) );
    } else {
      // Null data was handled by the guard above, so fall back to the
      // default string rendering of the date.
      retval = valueMeta.getString( valueData );
    }
  } else if ( valueMeta.isString() ) {
    retval = valueMeta.getString( valueData );
  } else if ( valueMeta.isBinary() ) {
    // Null data was handled by the guard above; decode the bytes as UTF-8.
    try {
      retval = new String( valueMeta.getBinary( valueData ), "UTF-8" );
    } catch ( UnsupportedEncodingException e ) {
      // chances are small we'll get here. UTF-8 is mandatory.
      retval = Const.NULL_BINARY;
    }
  } else {
    // Boolean (and anything else): default string rendering.
    retval = valueMeta.getString( valueData );
  }

  return retval;
}