Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#getDate()

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#getDate(). They are drawn from open-source projects; the source file, originating project, and license are noted above each example.
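As a baseline before the examples, here is a minimal sketch of the call pattern (the field name, mask, and value are illustrative, not taken from any project): getDate() converts a raw row value into a java.util.Date according to the value metadata's type and conversion mask.

import java.util.Date;

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class GetDateSketch {
  public static void main( String[] args ) throws KettleValueException {
    // A String-typed value meta with an explicit conversion mask.
    ValueMetaInterface meta = new ValueMetaString( "orderDate" );
    meta.setConversionMask( "yyyy/MM/dd" );

    // getDate() parses the raw String into a java.util.Date using the mask.
    Date orderDate = meta.getDate( "2017/10/20" );
    System.out.println( orderDate );
  }
}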
Example 1
Source File: RowForumulaContext.java    From pentaho-kettle with Apache License 2.0
public static Object getPrimitive( ValueMetaInterface valueMeta, Object valueData ) throws KettleValueException {
  switch ( valueMeta.getType() ) {
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return valueMeta.getBigNumber( valueData );
    case ValueMetaInterface.TYPE_BINARY:
      return valueMeta.getBinary( valueData );
    case ValueMetaInterface.TYPE_BOOLEAN:
      return valueMeta.getBoolean( valueData );
    case ValueMetaInterface.TYPE_DATE:
      return valueMeta.getDate( valueData );
    case ValueMetaInterface.TYPE_INTEGER:
      return valueMeta.getInteger( valueData );
    case ValueMetaInterface.TYPE_NUMBER:
      return valueMeta.getNumber( valueData );
      // case ValueMetaInterface.TYPE_SERIALIZABLE: return valueMeta.(valueData);
    case ValueMetaInterface.TYPE_STRING:
      return valueMeta.getString( valueData );
    default:
      return null;
  }
}
 
Example 2
Source File: ValueMetaBase.java    From pentaho-kettle with Apache License 2.0
/**
 * Convert the specified data to the data type specified in this object.
 *
 * @param meta2
 *          the metadata of the object to be converted
 * @param data2
 *          the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException
 *           in case there is a data conversion error
 */
@Override
public Object convertData( ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  switch ( getType() ) {
    case TYPE_NONE:
    case TYPE_STRING:
      return meta2.getString( data2 );
    case TYPE_NUMBER:
      return meta2.getNumber( data2 );
    case TYPE_INTEGER:
      return meta2.getInteger( data2 );
    case TYPE_DATE:
      return meta2.getDate( data2 );
    case TYPE_BIGNUMBER:
      return meta2.getBigNumber( data2 );
    case TYPE_BOOLEAN:
      return meta2.getBoolean( data2 );
    case TYPE_BINARY:
      return meta2.getBinary( data2 );
    default:
      throw new KettleValueException( toString() + " : I can't convert the specified value to data type : "
          + getType() );
  }
}
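A hedged usage sketch of convertData() (the names and values are illustrative): when the target type is TYPE_DATE, the switch above delegates to meta2.getDate( data2 ), so a masked String converts directly to a Date.

import java.util.Date;

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaDate;
import org.pentaho.di.core.row.value.ValueMetaString;

public class ConvertDataSketch {
  public static void main( String[] args ) throws KettleValueException {
    ValueMetaInterface dateMeta = new ValueMetaDate( "created" );     // target type: Date
    ValueMetaInterface stringMeta = new ValueMetaString( "created" ); // source type: String
    stringMeta.setConversionMask( "yyyy-MM-dd" );

    // Dispatches on dateMeta's type (TYPE_DATE) and ends up calling
    // stringMeta.getDate( "2020-01-31" ).
    Date created = (Date) dateMeta.convertData( stringMeta, "2020-01-31" );
    System.out.println( created );
  }
}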
 
Example 3
Source File: ValueMetaBase.java    From pentaho-kettle with Apache License 2.0
/**
 * Convert the specified data to the data type specified in this object. For String conversion, be compatible with
 * version 2.5.2.
 *
 * @param meta2
 *          the metadata of the object to be converted
 * @param data2
 *          the data of the object to be converted
 * @return the object in the data type of this value metadata object
 * @throws KettleValueException
 *           in case there is a data conversion error
 */
@Override
public Object convertDataCompatible( ValueMetaInterface meta2, Object data2 ) throws KettleValueException {
  switch ( getType() ) {
    case TYPE_STRING:
      return meta2.getCompatibleString( data2 );
    case TYPE_NUMBER:
      return meta2.getNumber( data2 );
    case TYPE_INTEGER:
      return meta2.getInteger( data2 );
    case TYPE_DATE:
      return meta2.getDate( data2 );
    case TYPE_BIGNUMBER:
      return meta2.getBigNumber( data2 );
    case TYPE_BOOLEAN:
      return meta2.getBoolean( data2 );
    case TYPE_BINARY:
      return meta2.getBinary( data2 );
    default:
      throw new KettleValueException( toString() + " : I can't convert the specified value to data type : "
          + getType() );
  }
}
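The only difference from convertData() is the String branch: getCompatibleString() renders values the way Kettle 2.5.2 did instead of applying the current conversion mask. A minimal sketch (names illustrative):

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.core.row.value.ValueMetaString;

public class CompatibleConvertSketch {
  public static void main( String[] args ) throws KettleValueException {
    ValueMetaInterface stringMeta = new ValueMetaString( "asText" );
    ValueMetaInterface intMeta = new ValueMetaInteger( "n" );

    // For a String target, this routes through getCompatibleString() rather
    // than getString(), preserving the 2.5.2-era rendering of the value.
    String rendered = (String) stringMeta.convertDataCompatible( intMeta, 42L );
    System.out.println( rendered );
  }
}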
 
Example 4
Source File: ValueMetaStringTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void testGetDateWithoutConversionMask() throws KettleValueException {
  Calendar date = new GregorianCalendar( 2017, 9, 20 ); // month 9 = Oct
  String value = "2017/10/20 00:00:00.000";
  ValueMetaInterface stringValueMeta = new ValueMetaString( "test" );

  Date expected = Date.from( date.toInstant() );
  Date result = stringValueMeta.getDate( value );
  assertEquals( expected, result );
}
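The test passes because, with no conversion mask set, a String value meta falls back to Kettle's default date mask, yyyy/MM/dd HH:mm:ss.SSS (overridable via the KETTLE_DEFAULT_DATE_FORMAT property). With an explicit mask, getDate() parses other layouts; a minimal variant (mask and value are illustrative):

import java.util.Date;

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaString;

public class ExplicitMaskSketch {
  public static void main( String[] args ) throws KettleValueException {
    ValueMetaInterface meta = new ValueMetaString( "test" );
    meta.setConversionMask( "dd-MM-yyyy" );

    // The explicit mask now applies, so this parses where the default mask would not.
    Date parsed = meta.getDate( "20-10-2017" );
    System.out.println( parsed );
  }
}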
 
Example 5
Source File: KettleToBQTableRowFn.java    From kettle-beam with Apache License 2.0
@Override public TableRow apply( KettleRow inputRow ) {

    try {
      if ( rowMeta == null ) {
        readCounter = Metrics.counter( "read", counterName );
        outputCounter = Metrics.counter( "output", counterName );
        errorCounter = Metrics.counter( "error", counterName );

        // Initialize Kettle Beam
        //
        BeamKettle.init( stepPluginClasses, xpPluginClasses );
        rowMeta = JsonRowMeta.fromJson( rowMetaJson );

        simpleDateFormat = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss.SSS" );
        Metrics.counter( "init", counterName ).inc();
      }

      readCounter.inc();

      TableRow tableRow = new TableRow();
      for ( int i = 0; i < rowMeta.size(); i++ ) {
        ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
        Object valueData = inputRow.getRow()[i];
        if ( !valueMeta.isNull( valueData ) ) {
          switch ( valueMeta.getType() ) {
            case ValueMetaInterface.TYPE_STRING: tableRow.put( valueMeta.getName(), valueMeta.getString( valueData ) ); break;
            case ValueMetaInterface.TYPE_INTEGER: tableRow.put( valueMeta.getName(), valueMeta.getInteger( valueData ) ); break;
            case ValueMetaInterface.TYPE_DATE:
              Date date = valueMeta.getDate( valueData );
              String formattedDate = simpleDateFormat.format( date );
              tableRow.put( valueMeta.getName(), formattedDate );
              break;
            case ValueMetaInterface.TYPE_BOOLEAN: tableRow.put( valueMeta.getName(), valueMeta.getBoolean( valueData ) ); break;
            case ValueMetaInterface.TYPE_NUMBER: tableRow.put( valueMeta.getName(), valueMeta.getNumber( valueData ) ); break;
            default:
              throw new RuntimeException( "Data type conversion from Kettle to BigQuery TableRow not supported yet: " + valueMeta.toString() );
          }
        }
      }

      // Pass the row to the process context
      //
      outputCounter.inc();

      return tableRow;

    } catch ( Exception e ) {
      errorCounter.inc();
      LOG.info( "Conversion error KettleRow to BigQuery TableRow : " + e.getMessage() );
      throw new RuntimeException( "Error converting KettleRow to BigQuery TableRow", e );
    }
  }
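Here the TYPE_DATE branch is where getDate() matters: BigQuery's TableRow takes the timestamp as a formatted string, not a java.util.Date. The date-specific part, isolated (field name illustrative):

import java.text.SimpleDateFormat;
import java.util.Date;

import com.google.api.services.bigquery.model.TableRow;

public class DateToTableRowSketch {
  public static void main( String[] args ) {
    SimpleDateFormat fmt = new SimpleDateFormat( "yyyy-MM-dd HH:mm:ss.SSS" );
    TableRow row = new TableRow();
    // BigQuery accepts TIMESTAMP values as strings in this layout.
    row.put( "created", fmt.format( new Date() ) );
    System.out.println( row );
  }
}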
 
Example 6
Source File: HBaseValueMeta.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * Encode a keyValue (with associated meta data) to an array of bytes with respect to the key type specified in a
 * mapping.
 *
 * @param keyValue the key value (object) to encode
 * @param keyMeta  meta data about the key value
 * @param keyType  the target type of the encoded key value
 * @return the key encoded as an array of bytes
 * @throws KettleException if something goes wrong
 */
public static byte[] encodeKeyValue( Object keyValue,
                                     ValueMetaInterface keyMeta, Mapping.KeyType keyType,
                                     HBaseBytesUtilShim bytesUtil ) throws KettleException {

  byte[] result = null;

  switch ( keyType ) {
    case STRING:
      String stringKey = keyMeta.getString( keyValue );
      result = encodeKeyValue( stringKey, keyType, bytesUtil );
      break;
    case DATE:
    case UNSIGNED_DATE:
      Date dateKey = keyMeta.getDate( keyValue );
      if ( keyType == Mapping.KeyType.UNSIGNED_DATE && dateKey.getTime() < 0 ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnsignedDate" ) );
      }
      result = encodeKeyValue( dateKey, keyType, bytesUtil );
      break;
    case INTEGER:
    case UNSIGNED_INTEGER:
      int keyInt = keyMeta.getInteger( keyValue ).intValue();
      if ( keyType == Mapping.KeyType.UNSIGNED_INTEGER && keyInt < 0 ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnsignedIngteger" ) );
      }
      result = encodeKeyValue( new Integer( keyInt ), keyType, bytesUtil );
      break;
    case LONG:
    case UNSIGNED_LONG:
      long keyLong = keyMeta.getInteger( keyValue ).longValue();
      if ( keyType == Mapping.KeyType.UNSIGNED_LONG && keyLong < 0 ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnsignedLong" ) );
      }
      result = encodeKeyValue( new Long( keyLong ), keyType, bytesUtil );
      break;

    case BINARY:
      byte[] keyBinary = keyMeta.getBinary( keyValue );
      result = encodeKeyValue( keyBinary, keyType, bytesUtil );
  }

  if ( result == null ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "HBaseValueMeta.Error.UnknownTableKeyType" ) );
  }

  return result;
}
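A fully runnable sketch would need a concrete HBaseBytesUtilShim from the active Hadoop shim, so only the date-specific guard is distilled here (the value is illustrative): unsigned date keys must have non-negative epoch milliseconds.

import java.util.Date;

public class UnsignedDateGuardSketch {
  public static void main( String[] args ) {
    Date preEpoch = new Date( -86400000L ); // 1969-12-31: negative epoch millis
    // encodeKeyValue() throws a KettleException keyed by
    // "HBaseValueMeta.Error.UnsignedDate" when keyType is UNSIGNED_DATE
    // and getTime() is negative, as it is here.
    System.out.println( "unsigned-safe: " + ( preEpoch.getTime() >= 0 ) );
  }
}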
 
Example 7
Source File: HBaseValueMeta.java    From pentaho-hadoop-shims with Apache License 2.0
public static byte[] encodeColumnValue( Object columnValue,
                                        ValueMetaInterface colMeta, HBaseValueMeta mappingColMeta,
                                        HBaseBytesUtilShim bytesUtil ) throws KettleException {

  byte[] encoded = null;
  switch ( mappingColMeta.getType() ) {
    case TYPE_STRING:
      String toEncode = colMeta.getString( columnValue );
      encoded = bytesUtil.toBytes( toEncode );
      break;
    case TYPE_INTEGER:
      Long l = colMeta.getInteger( columnValue );
      if ( mappingColMeta.getIsLongOrDouble() ) {
        encoded = bytesUtil.toBytes( l.longValue() );
      } else {
        encoded = bytesUtil.toBytes( l.intValue() );
      }
      break;
    case TYPE_NUMBER:
      Double d = colMeta.getNumber( columnValue );
      if ( mappingColMeta.getIsLongOrDouble() ) {
        encoded = bytesUtil.toBytes( d.doubleValue() );
      } else {
        encoded = bytesUtil.toBytes( d.floatValue() );
      }
      break;
    case TYPE_DATE:
      Date date = colMeta.getDate( columnValue );
      encoded = bytesUtil.toBytes( date.getTime() );
      break;
    case TYPE_BOOLEAN:
      Boolean b = colMeta.getBoolean( columnValue );
      String boolString = ( b.booleanValue() ) ? "Y" : "N";
      encoded = bytesUtil.toBytes( boolString );
      break;
    case TYPE_BIGNUMBER:
      BigDecimal bd = colMeta.getBigNumber( columnValue );
      String bds = bd.toString();
      encoded = bytesUtil.toBytes( bds );
      break;
    case TYPE_SERIALIZABLE:
      try {
        encoded = encodeObject( columnValue );
      } catch ( IOException e ) {
        throw new KettleException( BaseMessages.getString( PKG,
          "HBaseValueMeta.Error.UnableToSerialize", colMeta.getName() ), e );
      }
      break;
    case TYPE_BINARY:
      encoded = colMeta.getBinary( columnValue );
      break;
  }

  if ( encoded == null ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "HBaseValueMeta.Error.UnknownTypeForColumn" ) );
  }

  return encoded;
}
 
Example 8
Source File: DimensionCache.java    From pentaho-kettle with Apache License 2.0
/**
 * Compare 2 rows of data using the natural keys and indexes specified.
 *
 * @param o1 the cached dimension row, including its date range
 * @param o2 the lookup row
 * @return 0 if the natural keys match and o2's lookup date falls within o1's date range, otherwise the comparison result
 */
public int compare( Object[] o1, Object[] o2 ) {
  try {
    // First compare on the natural keys...
    //
    int cmp = rowMeta.compare( o1, o2, keyIndexes );
    if ( cmp != 0 ) {
      return cmp;
    }

    // Then see if the lookup date (the start of the date range of o2) falls between the start and end of the date range of o1
    //
    ValueMetaInterface fromDateMeta = rowMeta.getValueMeta( fromDateIndex );
    ValueMetaInterface toDateMeta = rowMeta.getValueMeta( toDateIndex );

    Date fromDate = fromDateMeta.getDate( o1[fromDateIndex] );
    Date toDate = toDateMeta.getDate( o1[toDateIndex] );
    Date lookupDate = fromDateMeta.getDate( o2[fromDateIndex] );

    int fromCmpLookup = 0;
    if ( fromDate == null ) {
      if ( lookupDate == null ) {
        fromCmpLookup = 0;
      } else {
        fromCmpLookup = -1;
      }
    } else {
      if ( lookupDate == null ) {
        fromCmpLookup = 1;
      } else {
        fromCmpLookup = fromDateMeta.compare( fromDate, lookupDate );
      }
    }
    if ( fromCmpLookup < 0 ) {
      if ( toDate != null ) {
        int toCmpLookup = toDateMeta.compare( toDate, lookupDate );
        if ( toCmpLookup > 0 ) {
          return 0;
        }
      }
    }
    return fromCmpLookup;
  } catch ( Exception e ) {
    throw new RuntimeException( e );
  }
}
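Distilled, the comparator reports a match (returns 0) when o2's lookup date equals o1's start date or falls strictly between o1's start and end dates, with null dates sorting earliest. A restatement of that predicate in isolation (helper names are illustrative, not from the source):

import java.util.Date;

public class DateRangePredicateSketch {
  // Match when from == lookup, or from < lookup < to.
  static boolean matches( Date from, Date to, Date lookup ) {
    int fromCmp = compareNullable( from, lookup );
    if ( fromCmp == 0 ) {
      return true;
    }
    return fromCmp < 0 && to != null && to.after( lookup );
  }

  // Null dates compare as earliest, mirroring the comparator's null handling.
  static int compareNullable( Date a, Date b ) {
    if ( a == null ) {
      return b == null ? 0 : -1;
    }
    return b == null ? 1 : a.compareTo( b );
  }

  public static void main( String[] args ) {
    Date from = new Date( 0L ), to = new Date( 1000L );
    System.out.println( matches( from, to, new Date( 500L ) ) );  // true
    System.out.println( matches( from, to, new Date( 2000L ) ) ); // false
  }
}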
 
Example 9
Source File: BaseDatabaseMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public String getSQLValue( ValueMetaInterface valueMeta, Object valueData, String dateFormat ) throws KettleValueException {

  StringBuilder ins = new StringBuilder();

  if ( valueMeta.isNull( valueData ) ) {
    ins.append( "null" );
  } else {
    // Normal cases...
    //
    switch ( valueMeta.getType() ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
      case ValueMetaInterface.TYPE_STRING:
        String string = valueMeta.getString( valueData );
        // Have the database dialect do the quoting.
        // This also adds the single quotes around the string (thanks to PostgreSQL)
        //
        string = quoteSQLString( string );
        ins.append( string );
        break;
      case ValueMetaInterface.TYPE_DATE:
        Date date = valueMeta.getDate( valueData );

        if ( Utils.isEmpty( dateFormat ) ) {
          ins.append( "'" + valueMeta.getString( valueData ) + "'" );
        } else {
          try {
            java.text.SimpleDateFormat formatter = new java.text.SimpleDateFormat( dateFormat );
            ins.append( "'" + formatter.format( date ) + "'" );
          } catch ( Exception e ) {
            throw new KettleValueException( "Error : ", e );
          }
        }
        break;
      default:
        ins.append( valueMeta.getString( valueData ) );
        break;
    }
  }

  return ins.toString();
}
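A hedged usage sketch (the concrete dialect, field name, and format are illustrative): with a date format supplied, the TYPE_DATE branch formats the value via SimpleDateFormat and wraps it in single quotes.

import java.util.Date;

import org.pentaho.di.core.database.DatabaseInterface;
import org.pentaho.di.core.database.MySQLDatabaseMeta;
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaDate;

public class SqlDateLiteralSketch {
  public static void main( String[] args ) throws KettleValueException {
    DatabaseInterface dialect = new MySQLDatabaseMeta(); // any concrete dialect would do
    ValueMetaInterface dateMeta = new ValueMetaDate( "shipped" );

    // Renders something like '2021-06-15'; a null value would render as the literal null.
    String literal = dialect.getSQLValue( dateMeta, new Date(), "yyyy-MM-dd" );
    System.out.println( literal );
  }
}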
 
Example 10
Source File: AccessOutputMeta.java    From pentaho-kettle with Apache License 2.0
public static Object[] createObjectsForRow( RowMetaInterface rowMeta, Object[] rowData ) throws KettleValueException {
  Object[] values = new Object[rowMeta.size()];
  for ( int i = 0; i < rowMeta.size(); i++ ) {
    ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
    Object valueData = rowData[i];

    // Prevent a NullPointerException below
    if ( valueData == null || valueMeta == null ) {
      values[i] = null;
      continue;
    }

    int length = valueMeta.getLength();

    switch ( valueMeta.getType() ) {
      case ValueMetaInterface.TYPE_INTEGER:
        if ( length < 3 ) {
          values[i] = new Byte( valueMeta.getInteger( valueData ).byteValue() );
        } else {
          if ( length < 5 ) {
            values[i] = new Short( valueMeta.getInteger( valueData ).shortValue() );
          } else {
            values[i] = valueMeta.getInteger( valueData );
          }
        }
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        values[i] = valueMeta.getNumber( valueData );
        break;
      case ValueMetaInterface.TYPE_DATE:
        values[i] = valueMeta.getDate( valueData );
        break;
      case ValueMetaInterface.TYPE_STRING:
        values[i] = valueMeta.getString( valueData );
        break;
      case ValueMetaInterface.TYPE_BINARY:
        values[i] = valueMeta.getBinary( valueData );
        break;
      case ValueMetaInterface.TYPE_BOOLEAN:
        values[i] = valueMeta.getBoolean( valueData );
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
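        // Note: BigNumber values are emitted via getNumber(), i.e. as Double, so precision beyond a double is lost.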
        values[i] = valueMeta.getNumber( valueData );
        break;
      default:
        break;
    }
  }
  return values;
}
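A minimal usage sketch (row layout, lengths, and values are illustrative; the AccessOutputMeta import path is assumed from the step's package): Dates pass through getDate() unchanged, while integers are narrowed to Byte or Short when the declared length is small.

import java.util.Date;

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaDate;
import org.pentaho.di.core.row.value.ValueMetaInteger;
import org.pentaho.di.trans.steps.accessoutput.AccessOutputMeta;

public class AccessRowSketch {
  public static void main( String[] args ) throws KettleValueException {
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta( new ValueMetaDate( "created" ) );

    ValueMetaInteger qty = new ValueMetaInteger( "qty" );
    qty.setLength( 9 ); // length >= 5 keeps the full Long; shorter lengths narrow to Short/Byte
    rowMeta.addValueMeta( qty );

    Object[] kettleRow = new Object[] { new Date(), 42L };
    Object[] accessRow = AccessOutputMeta.createObjectsForRow( rowMeta, kettleRow );
    System.out.println( accessRow[0] + ", " + accessRow[1] );
  }
}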
 
Example 11
Source File: LucidDBBulkLoader.java    From pentaho-kettle with Apache License 2.0
private void writeRowToBulk( RowMetaInterface rowMeta, Object[] r ) throws KettleException {

    try {
      // So, we have this output stream to which we can write CSV data.
      // Basically, what we need to do is write the binary data (converted from strings, as part of this proof of concept)
      //
      // The data format required is essentially:
      //
      for ( int i = 0; i < data.keynrs.length; i++ ) {
        if ( i > 0 ) {
          // Write a separator
          //
          data.fifoStream.write( data.separator );
        }

        int index = data.keynrs[i];
        ValueMetaInterface valueMeta = rowMeta.getValueMeta( index );
        Object valueData = r[index];

        if ( valueData != null ) {
          switch ( valueMeta.getType() ) {
            case ValueMetaInterface.TYPE_STRING:
              data.fifoStream.write( data.quote );
              if ( valueMeta.isStorageBinaryString() && meta.getFieldFormatOk()[i] ) {
                // We had a string, just dump it back.
                data.fifoStream.write( (byte[]) valueData );
              } else {
                data.fifoStream.write( valueMeta.getString( valueData ).getBytes() );
              }
              data.fifoStream.write( data.quote );
              break;
            case ValueMetaInterface.TYPE_INTEGER:
              if ( valueMeta.isStorageBinaryString() && meta.getFieldFormatOk()[i] ) {
                data.fifoStream.write( (byte[]) valueData );
              } else {
                data.fifoStream.write( Long.toString( valueMeta.getInteger( valueData ) ).getBytes() );
              }
              break;
            case ValueMetaInterface.TYPE_DATE:
              // REVIEW jvs 13-Dec-2008: Is it OK to ignore
              // FieldFormatOk like this?
              /*
               * if (false && valueMeta.isStorageBinaryString() && meta.getFieldFormatOk()[i]) {
               * data.fifoStream.write((byte[])valueData); } else {
               */
              Date date = valueMeta.getDate( valueData );
              // Convert it to the ISO timestamp format
              // "yyyy-MM-dd HH:mm:ss" // or date format
              // "yyyy-MM-dd" as appropriate, since LucidDB
              // follows SQL:2003 here
              data.fifoStream.write( data.bulkFormatMeta[i].getString( date ).getBytes() );
              // }
              break;
            case ValueMetaInterface.TYPE_BOOLEAN:
              if ( valueMeta.isStorageBinaryString() && meta.getFieldFormatOk()[i] ) {
                data.fifoStream.write( (byte[]) valueData );
              } else {
                data.fifoStream.write( Boolean.toString( valueMeta.getBoolean( valueData ) ).getBytes() );
              }
              break;
            case ValueMetaInterface.TYPE_NUMBER:
              if ( valueMeta.isStorageBinaryString() && meta.getFieldFormatOk()[i] ) {
                data.fifoStream.write( (byte[]) valueData );
              } else {
                data.fifoStream.write( Double.toString( valueMeta.getNumber( valueData ) ).getBytes() );
              }
              break;
            case ValueMetaInterface.TYPE_BIGNUMBER:
              if ( valueMeta.isStorageBinaryString() && meta.getFieldFormatOk()[i] ) {
                data.fifoStream.write( (byte[]) valueData );
              } else {
                data.fifoStream.write( valueMeta.getString( valueData ).getBytes() );
              }
              break;
            default:
              break;
          }
        }
      }

      // finally write a newline
      //
      data.fifoStream.write( data.newline );
    } catch ( Exception e ) {
      throw new KettleException( "Error serializing rows of data to the fifo file", e );
    }

  }
 
Example 12
Source File: AddXML.java    From pentaho-kettle with Apache License 2.0
private String formatField( ValueMetaInterface valueMeta, Object valueData, XMLField field )
  throws KettleValueException {
  String retval = "";
  if ( field == null ) {
    return "";
  }

  if ( valueMeta == null || valueMeta.isNull( valueData ) ) {
    String defaultNullValue = field.getNullString();
    return Utils.isEmpty( defaultNullValue ) ? "" : defaultNullValue;
  }

  if ( valueMeta.isNumeric() ) {
    // Formatting
    if ( !Utils.isEmpty( field.getFormat() ) ) {
      data.df.applyPattern( field.getFormat() );
    } else {
      data.df.applyPattern( data.defaultDecimalFormat.toPattern() );
    }
    // Decimal
    if ( !Utils.isEmpty( field.getDecimalSymbol() ) ) {
      data.dfs.setDecimalSeparator( field.getDecimalSymbol().charAt( 0 ) );
    } else {
      data.dfs.setDecimalSeparator( data.defaultDecimalFormatSymbols.getDecimalSeparator() );
    }
    // Grouping
    if ( !Utils.isEmpty( field.getGroupingSymbol() ) ) {
      data.dfs.setGroupingSeparator( field.getGroupingSymbol().charAt( 0 ) );
    } else {
      data.dfs.setGroupingSeparator( data.defaultDecimalFormatSymbols.getGroupingSeparator() );
    }
    // Currency symbol
    if ( !Utils.isEmpty( field.getCurrencySymbol() ) ) {
      data.dfs.setCurrencySymbol( field.getCurrencySymbol() );
    } else {
      data.dfs.setCurrencySymbol( data.defaultDecimalFormatSymbols.getCurrencySymbol() );
    }

    data.df.setDecimalFormatSymbols( data.dfs );

    if ( valueMeta.isBigNumber() ) {
      retval = data.df.format( valueMeta.getBigNumber( valueData ) );
    } else if ( valueMeta.isNumber() ) {
      retval = data.df.format( valueMeta.getNumber( valueData ) );
    } else {
      // Integer
      retval = data.df.format( valueMeta.getInteger( valueData ) );
    }
  } else if ( valueMeta.isDate() ) {
    if ( field != null && !Utils.isEmpty( field.getFormat() ) && valueMeta.getDate( valueData ) != null ) {
      if ( !Utils.isEmpty( field.getFormat() ) ) {
        data.daf.applyPattern( field.getFormat() );
      } else {
        data.daf.applyPattern( data.defaultDateFormat.toLocalizedPattern() );
      }
      data.daf.setDateFormatSymbols( data.dafs );
      retval = data.daf.format( valueMeta.getDate( valueData ) );
    } else {
      if ( valueMeta.isNull( valueData ) ) {
        if ( field != null && !Utils.isEmpty( field.getNullString() ) ) {
          retval = field.getNullString();
        }
      } else {
        retval = valueMeta.getString( valueData );
      }
    }
  } else if ( valueMeta.isString() ) {
    retval = valueMeta.getString( valueData );
  } else if ( valueMeta.isBinary() ) {
    if ( valueMeta.isNull( valueData ) ) {
      if ( !Utils.isEmpty( field.getNullString() ) ) {
        retval = field.getNullString();
      } else {
        retval = Const.NULL_BINARY;
      }
    } else {
      try {
        retval = new String( valueMeta.getBinary( valueData ), "UTF-8" );
      } catch ( UnsupportedEncodingException e ) {
        // chances are small we'll get here. UTF-8 is
        // mandatory.
        retval = Const.NULL_BINARY;
      }
    }
  } else {
    // Boolean
    retval = valueMeta.getString( valueData );
  }

  return retval;
}