Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#TYPE_BINARY

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#TYPE_BINARY. You can go to the original project or source file noted above each example to check out the surrounding API usage.
Example 1
Source File: RowForumulaContext.java    From pentaho-kettle with Apache License 2.0
public static Object getPrimitive( ValueMetaInterface valueMeta, Object valueData ) throws KettleValueException {
  switch ( valueMeta.getType() ) {
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return valueMeta.getBigNumber( valueData );
    case ValueMetaInterface.TYPE_BINARY:
      return valueMeta.getBinary( valueData );
    case ValueMetaInterface.TYPE_BOOLEAN:
      return valueMeta.getBoolean( valueData );
    case ValueMetaInterface.TYPE_DATE:
      return valueMeta.getDate( valueData );
    case ValueMetaInterface.TYPE_INTEGER:
      return valueMeta.getInteger( valueData );
    case ValueMetaInterface.TYPE_NUMBER:
      return valueMeta.getNumber( valueData );
      // case ValueMetaInterface.TYPE_SERIALIZABLE: return valueMeta.(valueData);
    case ValueMetaInterface.TYPE_STRING:
      return valueMeta.getString( valueData );
    default:
      return null;
  }
}
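
For TYPE_BINARY, the switch above simply delegates to ValueMetaInterface.getBinary(), which for normally stored values hands the byte[] back unchanged. A minimal usage sketch of that call, reusing the deprecated ValueMeta constructor that also appears in Example 12 (Kettle core on the classpath is assumed; the class and field names are illustrative):

import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;

public class BinaryPrimitiveSketch {
  public static void main( String[] args ) throws KettleValueException {
    // build a binary value meta the same way Example 12 below does
    ValueMetaInterface binaryMeta = new ValueMeta( "payload", ValueMetaInterface.TYPE_BINARY );
    byte[] rowValue = new byte[] { 0x01, 0x02, 0x03 };

    // this is what the TYPE_BINARY branch of getPrimitive() boils down to
    byte[] primitive = binaryMeta.getBinary( rowValue );
    System.out.println( primitive.length ); // 3
  }
}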
 
Example 2
Source File: TypeConverterFactory.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * Determine the Hadoop Writable type to use when passing a Kettle value back to Hadoop.
 *
 * @param kettleType the Kettle value metadata to map
 * @return Java type to convert {@code kettleType} to when sending data back to Hadoop.
 */
public static Class<? extends Writable> getWritableForKettleType( ValueMetaInterface kettleType ) {
  if ( kettleType == null ) {
    return NullWritable.class;
  }
  switch ( kettleType.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_DATE:
      return Text.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return LongWritable.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return DoubleWritable.class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return BooleanWritable.class;
    case ValueMetaInterface.TYPE_BINARY:
      return BytesWritable.class;
    default:
      return Text.class;
  }
}
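
For TYPE_BINARY the factory above answers BytesWritable, so a Kettle byte[] is wrapped before it goes back to Hadoop. A short sketch of that wrapping (Hadoop common on the classpath is assumed):

import org.apache.hadoop.io.BytesWritable;

public class BytesWritableSketch {
  public static void main( String[] args ) {
    byte[] kettleBinary = new byte[] { 0x0A, 0x0B };
    // the Writable class returned for ValueMetaInterface.TYPE_BINARY above
    BytesWritable writable = new BytesWritable( kettleBinary );
    System.out.println( writable.getLength() ); // 2
  }
}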
 
Example 3
Source File: MongodbInputDiscoverFieldsImpl.java    From pentaho-mongodb-plugin with Apache License 2.0
protected static int mongoToKettleType( Object fieldValue ) {
  if ( fieldValue == null ) {
    return ValueMetaInterface.TYPE_STRING;
  }

  if ( fieldValue instanceof Symbol || fieldValue instanceof String || fieldValue instanceof Code
        || fieldValue instanceof ObjectId || fieldValue instanceof MinKey || fieldValue instanceof MaxKey ) {
    return ValueMetaInterface.TYPE_STRING;
  } else if ( fieldValue instanceof Date ) {
    return ValueMetaInterface.TYPE_DATE;
  } else if ( fieldValue instanceof Number ) {
    // try to parse as an Integer
    try {
      Integer.parseInt( fieldValue.toString() );
      return ValueMetaInterface.TYPE_INTEGER;
    } catch ( NumberFormatException e ) {
      return ValueMetaInterface.TYPE_NUMBER;
    }
  } else if ( fieldValue instanceof Binary ) {
    return ValueMetaInterface.TYPE_BINARY;
  } else if ( fieldValue instanceof BSONTimestamp ) {
    return ValueMetaInterface.TYPE_INTEGER;
  }

  return ValueMetaInterface.TYPE_STRING;
}
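
A MongoDB field arrives as org.bson.types.Binary when it holds raw bytes, and the method above maps that wrapper to TYPE_BINARY; the step then reads the actual payload out of it. A small sketch of that extraction (MongoDB Java driver types on the classpath are assumed):

import org.bson.types.Binary;
import org.pentaho.di.core.row.ValueMetaInterface;

public class MongoBinarySketch {
  public static void main( String[] args ) {
    Object fieldValue = new Binary( new byte[] { 1, 2, 3 } );

    if ( fieldValue instanceof Binary ) {
      int kettleType = ValueMetaInterface.TYPE_BINARY;    // what mongoToKettleType() reports
      byte[] payload = ( (Binary) fieldValue ).getData(); // the bytes the step will store
      System.out.println( kettleType + " / " + payload.length );
    }
  }
}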
 
Example 4
Source File: YamlReader.java    From pentaho-kettle with Apache License 2.0
private int getType( Object value ) {

    if ( value instanceof Integer ) {
      return ValueMetaInterface.TYPE_INTEGER;
    }
    if ( value instanceof Double ) {
      return ValueMetaInterface.TYPE_NUMBER;
    } else if ( value instanceof Long ) {
      return ValueMetaInterface.TYPE_INTEGER;
    } else if ( value instanceof Date ) {
      return ValueMetaInterface.TYPE_DATE;
    } else if ( value instanceof java.sql.Date ) {
      return ValueMetaInterface.TYPE_DATE;
    } else if ( value instanceof Timestamp ) {
      return ValueMetaInterface.TYPE_DATE;
    } else if ( value instanceof Boolean ) {
      return ValueMetaInterface.TYPE_BOOLEAN;
    } else if ( value instanceof BigInteger ) {
      return ValueMetaInterface.TYPE_BIGNUMBER;
    } else if ( value instanceof BigDecimal ) {
      return ValueMetaInterface.TYPE_BIGNUMBER;
    } else if ( value instanceof Byte ) {
      return ValueMetaInterface.TYPE_BINARY;
    }
    return ValueMetaInterface.TYPE_STRING;
  }
 
Example 5
Source File: ValueMetaConverter.java    From pentaho-kettle with Apache License 2.0
@Override
public Object convertFromSourceToTargetDataType( int sourceValueMetaType, int targetValueMetaType, Object value )
  throws ValueMetaConversionException {
  if ( value == null ) {
    return null;
  }

  switch ( sourceValueMetaType ) {
    case ValueMetaInterface.TYPE_INET:
      return convertFromInetMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_STRING:
      return convertFromStringMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_INTEGER:
      return convertFromIntegerMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_NUMBER:
      return convertFromNumberMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return convertFromBigNumberMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_TIMESTAMP:
      return convertFromTimestampMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_DATE:
      return convertFromDateMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_BOOLEAN:
      return convertFromBooleanMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_BINARY:
      return convertFromBinaryMetaInterface( targetValueMetaType, value );
    case ValueMetaInterface.TYPE_SERIALIZABLE:
      return convertFromSerializableMetaInterface( targetValueMetaType, value );
    default:
      throwBadConversionCombination( sourceValueMetaType, targetValueMetaType, value );
  }
  return null;
}
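
A hedged usage sketch of the converter above, turning a String into a binary value. The no-arg constructor, the package location, and String-to-binary support are assumptions inferred from the method shown; if the combination is unsupported, the call throws the ValueMetaConversionException declared above:

import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.row.value.ValueMetaConverter;  // package assumed

public class ConverterSketch {
  public static void main( String[] args ) throws Exception {
    ValueMetaConverter converter = new ValueMetaConverter();  // assumed no-arg constructor
    Object result = converter.convertFromSourceToTargetDataType(
      ValueMetaInterface.TYPE_STRING, ValueMetaInterface.TYPE_BINARY, "hello" );
    System.out.println( result instanceof byte[] );  // expected: true if String-to-binary is supported
  }
}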
 
Example 6
Source File: OrcSchemaConverter.java    From pentaho-hadoop-shims with Apache License 2.0
private int determineMetaType( TypeDescription subDescription ) {
  switch ( subDescription.getCategory().getName() ) {
    case "string":
    case "char":
    case "varchar":
      return ValueMetaInterface.TYPE_STRING;
    case "bigint":
    case "tinyint":
    case "smallint":
    case "int":
      return ValueMetaInterface.TYPE_INTEGER;
    case "double":
    case "float":
      return ValueMetaInterface.TYPE_NUMBER;
    case "decimal":
      return ValueMetaInterface.TYPE_BIGNUMBER;
    case "timestamp":
      return ValueMetaInterface.TYPE_TIMESTAMP;
    case "date":
      return ValueMetaInterface.TYPE_DATE;
    case "boolean":
      return ValueMetaInterface.TYPE_BOOLEAN;
    case "binary":
      return ValueMetaInterface.TYPE_BINARY;
  }
  // if none of the cases match, return -1
  return -1;
}
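
The ORC "binary" category is the one that maps to TYPE_BINARY above. A quick sketch building such a column description with the ORC API (org.apache.orc on the classpath is assumed):

import org.apache.orc.TypeDescription;

public class OrcBinarySketch {
  public static void main( String[] args ) {
    TypeDescription binaryColumn = TypeDescription.createBinary();
    // getCategory().getName() yields "binary", which the mapping above turns into TYPE_BINARY
    System.out.println( binaryColumn.getCategory().getName() );
  }
}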
 
Example 7
Source File: ValueMetaBase.java    From pentaho-kettle with Apache License 2.0
/**
 * Extracts the primitive data from an old style Value object
 *
 * @param value
 *          the old style Value object
 * @return the value's data, NOT the meta data.
 * @throws KettleValueException
 *           in case there is a data conversion problem
 */
@Override
public Object getValueData( Value value ) throws KettleValueException {
  if ( value == null || value.isNull() ) {
    return null;
  }

  // So far the old types and the new types map to the same thing.
  // For compatibility we just ask the old-style value to convert to the new
  // one.
  // In the old transformation this would happen sooner or later anyway.
  // It doesn't throw exceptions or complain either (unfortunately).
  //

  switch ( getType() ) {
    case ValueMetaInterface.TYPE_STRING:
      return value.getString();
    case ValueMetaInterface.TYPE_NUMBER:
      return value.getNumber();
    case ValueMetaInterface.TYPE_INTEGER:
      return value.getInteger();
    case ValueMetaInterface.TYPE_DATE:
      return value.getDate();
    case ValueMetaInterface.TYPE_BOOLEAN:
      return value.getBoolean();
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return value.getBigNumber();
    case ValueMetaInterface.TYPE_BINARY:
      return value.getBytes();
    default:
      throw new KettleValueException( toString() + " : We can't convert original data type " + value.getTypeDesc()
          + " to a primitive data type" );
  }
}
 
Example 8
Source File: AvroNestedReader.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * @param pentahoType the Kettle-side definition of the target field
 * @param avroData    the raw Avro bytes to convert
 * @param fieldSchema the Avro schema describing the field
 * @return the converted Kettle value, or null if the data is absent or cannot be converted
 */
public Object convertToKettleValue( AvroInputField pentahoType, ByteBuffer avroData, Schema fieldSchema ) {
  Object pentahoData = null;
  if ( avroData != null ) {
    try {
      switch ( pentahoType.getPentahoType() ) {
        case ValueMetaInterface.TYPE_BIGNUMBER:
          Conversions.DecimalConversion converter = new Conversions.DecimalConversion();
          Schema schema = fieldSchema;
          if ( schema.getType().equals( Schema.Type.UNION ) ) {
            List<Schema> schemas = schema.getTypes();
            for ( Schema s : schemas ) {
              if ( !s.getName().equalsIgnoreCase( "null" ) ) {
                schema = s;
                break;
              }
            }
          }
          Object precision = schema.getObjectProp( AvroSpec.DECIMAL_PRECISION );
          Object scale = schema.getObjectProp( AvroSpec.DECIMAL_SCALE );
          LogicalTypes.Decimal decimalType =
            LogicalTypes.decimal( Integer.parseInt( precision.toString() ), Integer.parseInt( scale.toString() ) );
          pentahoData = converter.fromBytes( avroData, m_schemaToUse, decimalType );
          break;
        case ValueMetaInterface.TYPE_BINARY:
          pentahoData = new byte[ avroData.remaining() ];
          avroData.get( (byte[]) pentahoData );
          break;
      }
    } catch ( Exception e ) {
      // If unable to do the type conversion just ignore. null will be returned.
    }
  }
  return pentahoData;
}
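
The TYPE_BINARY branch copies whatever remains in the ByteBuffer into a fresh byte[] rather than calling ByteBuffer.array(), which would ignore the buffer position and could expose a shared backing array. The same pattern in plain Java:

import java.nio.ByteBuffer;
import java.util.Arrays;

public class ByteBufferCopySketch {
  public static void main( String[] args ) {
    ByteBuffer avroData = ByteBuffer.wrap( new byte[] { 1, 2, 3, 4 } );
    avroData.get(); // pretend something already consumed the first byte

    byte[] pentahoData = new byte[ avroData.remaining() ];
    avroData.get( pentahoData ); // copies only the unread bytes: [2, 3, 4]
    System.out.println( Arrays.toString( pentahoData ) );
  }
}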
 
Example 9
Source File: PentahoAvroRecordReader.java    From pentaho-hadoop-shims with Apache License 2.0
private Object convertToPentahoType( int pentahoType, ByteBuffer avroData, Schema.Field field ) {
  Object pentahoData = null;
  if ( avroData != null ) {
    try {
      switch ( pentahoType ) {
        case ValueMetaInterface.TYPE_BIGNUMBER:
          Conversions.DecimalConversion converter = new Conversions.DecimalConversion();
          Schema schema = field.schema();
          if ( schema.getType().equals( Schema.Type.UNION ) ) {
            List<Schema> schemas = field.schema().getTypes();
            for ( Schema s : schemas ) {
              if ( !s.getName().equalsIgnoreCase( "null" ) ) {
                schema = s;
                break;
              }
            }
          }
          Object precision = schema.getObjectProp( AvroSpec.DECIMAL_PRECISION );
          Object scale = schema.getObjectProp( AvroSpec.DECIMAL_SCALE );
          LogicalTypes.Decimal decimalType =
            LogicalTypes.decimal( Integer.parseInt( precision.toString() ), Integer.parseInt( scale.toString() ) );
          pentahoData = converter.fromBytes( avroData, avroSchema, decimalType );
          break;
        case ValueMetaInterface.TYPE_BINARY:
          pentahoData = new byte[ avroData.remaining() ];
          avroData.get( (byte[]) pentahoData );
          break;
      }
    } catch ( Exception e ) {
      // If unable to do the type conversion just ignore. null will be returned.
    }
  }
  return pentahoData;
}
 
Example 10
Source File: SalesforceInput.java    From pentaho-kettle with Apache License 2.0
void doConversions( Object[] outputRowData, int i, String value ) throws KettleValueException {
  ValueMetaInterface targetValueMeta = data.outputRowMeta.getValueMeta( i );
  ValueMetaInterface sourceValueMeta = data.convertRowMeta.getValueMeta( i );

  if ( ValueMetaInterface.TYPE_BINARY != targetValueMeta.getType() ) {
    outputRowData[i] = targetValueMeta.convertData( sourceValueMeta, value );
  } else {
    // binary type of salesforce requires specific conversion
    if ( value != null ) {
      outputRowData[ i ] = Base64.decode( value );
    } else {
      outputRowData[ i ] = null;
    }
  }
}
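
Salesforce returns binary (base64) fields as Base64 text, so for a TYPE_BINARY target the step decodes the string instead of running it through convertData(). A hedged sketch of that decode using the JDK decoder; the step itself relies on whichever Base64 helper it imports, which is not visible in the snippet above:

import java.util.Base64;

public class SalesforceBinarySketch {
  public static void main( String[] args ) {
    String value = "aGVsbG8=";  // base64 text, the form in which the API returns a binary field
    // the step above null-checks first, then decodes with its own Base64 helper
    byte[] decoded = Base64.getDecoder().decode( value );
    System.out.println( decoded.length ); // 5 ("hello")
  }
}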
 
Example 11
Source File: PhysicalTableImporter.java    From pentaho-metadata with GNU Lesser General Public License v2.1
private static DataTypeSettings getDataTypeSettings( ValueMetaInterface v ) {
  DataTypeSettings dataTypeSettings = new DataTypeSettings( DataTypeSettings.DATA_TYPE_STRING );
  switch ( v.getType() ) {
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_NUMBER:
      dataTypeSettings.setType( DataTypeSettings.DATA_TYPE_NUMERIC );
      break;

    case ValueMetaInterface.TYPE_BINARY:
      dataTypeSettings.setType( DataTypeSettings.DATA_TYPE_BINARY );
      break;

    case ValueMetaInterface.TYPE_BOOLEAN:
      dataTypeSettings.setType( DataTypeSettings.DATA_TYPE_BOOLEAN );
      break;

    case ValueMetaInterface.TYPE_DATE:
      dataTypeSettings.setType( DataTypeSettings.DATA_TYPE_DATE );
      break;

    case ValueMetaInterface.TYPE_STRING:
      dataTypeSettings.setType( DataTypeSettings.DATA_TYPE_STRING );
      break;

    case ValueMetaInterface.TYPE_NONE:
      dataTypeSettings.setType( DataTypeSettings.DATA_TYPE_UNKNOWN );
      break;

    default:
      break;
  }
  dataTypeSettings.setLength( v.getLength() );
  dataTypeSettings.setPrecision( v.getPrecision() );

  return dataTypeSettings;
}
 
Example 12
Source File: JobGenerator.java    From pentaho-kettle with Apache License 2.0
private ValueMetaInterface getValueForLogicalColumn(DatabaseMeta databaseMeta, LogicalColumn column) {
  String columnName = ConceptUtil.getName(column, locale);
  String phColumnName = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_PHYSICAL_COLUMN_NAME);
  DataType columnType = column.getDataType();
  String lengthString = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_LENGTH);
  int length = Const.toInt(lengthString, -1);
  String precisionString = ConceptUtil.getString(column, DefaultIDs.LOGICAL_COLUMN_PRECISION);
  int precision = Const.toInt(precisionString, -1);

  int type = ValueMetaInterface.TYPE_STRING;
  switch (columnType) {
    case UNKNOWN:
    case URL:
    case STRING:
      precision = -1;
      break;
    case IMAGE:
    case BINARY:
      type = ValueMetaInterface.TYPE_BINARY;
      precision = -1;
      break;
    case BOOLEAN:
      type = ValueMetaInterface.TYPE_BOOLEAN;
      length = -1;
      precision = -1;
      break;
    case DATE:
      type = ValueMetaInterface.TYPE_DATE;
      length = -1;
      precision = -1;
      break;
    case NUMERIC:
      if (precision <= 0 && length < 15) {
        type = ValueMetaInterface.TYPE_INTEGER;
      } else if (length >= 15) {
        type = ValueMetaInterface.TYPE_BIGNUMBER;
      } else {
        type = ValueMetaInterface.TYPE_NUMBER;
      }
      break;
    default:
      break;
  }
  ValueMetaInterface value = new ValueMeta(databaseMeta.quoteField(Const.NVL(phColumnName, columnName)), type);
  value.setLength(length, precision);
  return value;
}
 
Example 13
Source File: OrcConverter.java    From pentaho-hadoop-shims with Apache License 2.0
protected static Object convertFromSourceToTargetDataType( ColumnVector columnVector, int currentBatchRow,
                                                           int orcValueMetaInterface ) {

  if ( columnVector.isNull[ currentBatchRow ] ) {
    return null;
  }
  switch ( orcValueMetaInterface ) {
    case ValueMetaInterface.TYPE_INET:
      try {
        return InetAddress.getByName( new String( ( (BytesColumnVector) columnVector ).vector[ currentBatchRow ],
          ( (BytesColumnVector) columnVector ).start[ currentBatchRow ],
          ( (BytesColumnVector) columnVector ).length[ currentBatchRow ] ) );
      } catch ( UnknownHostException e ) {
        e.printStackTrace();
      }
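      // NOTE: there is no break here, so when the lookup fails the code falls
      // through to the TYPE_STRING case below and returns the raw text instead.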

    case ValueMetaInterface.TYPE_STRING:
      return new String( ( (BytesColumnVector) columnVector ).vector[ currentBatchRow ],
        ( (BytesColumnVector) columnVector ).start[ currentBatchRow ],
        ( (BytesColumnVector) columnVector ).length[ currentBatchRow ] );

    case ValueMetaInterface.TYPE_INTEGER:
      return (long) ( (LongColumnVector) columnVector ).vector[ currentBatchRow ];

    case ValueMetaInterface.TYPE_NUMBER:
      return ( (DoubleColumnVector) columnVector ).vector[ currentBatchRow ];

    case ValueMetaInterface.TYPE_BIGNUMBER:
      HiveDecimalWritable obj = ( (DecimalColumnVector) columnVector ).vector[ currentBatchRow ];
      return obj.getHiveDecimal().bigDecimalValue();

    case ValueMetaInterface.TYPE_TIMESTAMP:
      Timestamp timestamp = new Timestamp( ( (TimestampColumnVector) columnVector ).time[ currentBatchRow ] );
      timestamp.setNanos( ( (TimestampColumnVector) columnVector ).nanos[ currentBatchRow ] );
      return timestamp;

    case ValueMetaInterface.TYPE_DATE:
      LocalDate localDate =
        LocalDate.ofEpochDay( 0 ).plusDays( ( (LongColumnVector) columnVector ).vector[ currentBatchRow ] );
      Date dateValue = Date.from( localDate.atStartOfDay( ZoneId.systemDefault() ).toInstant() );
      return dateValue;

    case ValueMetaInterface.TYPE_BOOLEAN:
      return ( (LongColumnVector) columnVector ).vector[ currentBatchRow ] == 0 ? false : true;

    case ValueMetaInterface.TYPE_BINARY:
      byte[] origBytes = ( (BytesColumnVector) columnVector ).vector[ currentBatchRow ];
      int startPos = ( (BytesColumnVector) columnVector ).start[ currentBatchRow ];
      byte[] newBytes = Arrays.copyOfRange( origBytes, startPos,
        startPos + ( (BytesColumnVector) columnVector ).length[ currentBatchRow ] );
      return newBytes;
  }

  // if none of the cases match, return null
  return null;
}
 
Example 14
Source File: MySQLDatabaseMeta.java    From pentaho-kettle with Apache License 2.0
@Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                            boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  if ( v.getLength() == DatabaseMeta.CLOB_LENGTH ) {
    v.setLength( getMaxTextFieldLength() );
  }
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval += fieldname + " ";
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "DATETIME";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      if ( supportsBooleanDataType() ) {
        retval += "BOOLEAN";
      } else {
        retval += "CHAR(1)";
      }
      break;

    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
        fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        if ( useAutoinc ) {
          retval += "BIGINT AUTO_INCREMENT NOT NULL PRIMARY KEY";
        } else {
          retval += "BIGINT NOT NULL PRIMARY KEY";
        }
      } else {
        // Integer values...
        if ( precision == 0 ) {
          if ( length > 9 ) {
            if ( length < 19 ) {
              // can hold signed values between -9223372036854775808 and 9223372036854775807
              // 18 significant digits
              retval += "BIGINT";
            } else {
              retval += "DECIMAL(" + length + ")";
            }
          } else {
            retval += "INT";
          }
        } else {
          // Floating point values...
          if ( length > 15 ) {
            retval += "DECIMAL(" + length;
            if ( precision > 0 ) {
              retval += ", " + precision;
            }
            retval += ")";
          } else {
            // A double-precision floating-point number is accurate to approximately 15 decimal places.
            // http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html
            retval += "DOUBLE";
          }
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length > 0 ) {
        if ( length == 1 ) {
          retval += "CHAR(1)";
        } else if ( length < 256 ) {
          retval += "VARCHAR(" + length + ")";
        } else if ( length < 65536 ) {
          retval += "TEXT";
        } else if ( length < 16777216 ) {
          retval += "MEDIUMTEXT";
        } else {
          retval += "LONGTEXT";
        }
      } else {
        retval += "TINYTEXT";
      }
      break;
    case ValueMetaInterface.TYPE_BINARY:
      retval += "LONGBLOB";
      break;
    default:
      retval += " UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
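
So a binary Kettle field becomes LONGBLOB in generated MySQL DDL. A hedged sketch of calling the method directly with a binary value meta (no technical or primary key, field name included, no trailing CR); the expected output assumes the default length and precision of -1:

import org.pentaho.di.core.database.MySQLDatabaseMeta;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;

public class MySqlBinaryDdlSketch {
  public static void main( String[] args ) {
    MySQLDatabaseMeta databaseMeta = new MySQLDatabaseMeta();
    String definition = databaseMeta.getFieldDefinition(
      new ValueMeta( "payload", ValueMetaInterface.TYPE_BINARY ), null, null, false, true, false );
    System.out.println( definition ); // expected: "payload LONGBLOB"
  }
}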
 
Example 15
Source File: SalesforceInputMetaInjectionTest.java    From pentaho-kettle with Apache License 2.0
@Test
public void test() throws Exception {
  check( "SALESFORCE_URL", () ->  meta.getTargetURL() );
  check( "SALESFORCE_USERNAME", () ->  meta.getUsername() );
  check( "SALESFORCE_PASSWORD", () ->  meta.getPassword() );
  check( "TIME_OUT", () ->  meta.getTimeout() );
  check( "USE_COMPRESSION", () ->  meta.isCompression() );
  check( "MODULE", () ->  meta.getModule() );
  check( "INCLUDE_SQL_IN_OUTPUT", () ->  meta.includeSQL() );
  check( "SQL_FIELDNAME", () ->  meta.getSQLField() );
  check( "INCLUDE_TIMESTAMP_IN_OUTPUT", () ->  meta.includeTimestamp() );
  check( "TIMESTAMP_FIELDNAME", () ->  meta.getTimestampField() );
  check( "INCLUDE_URL_IN_OUTPUT", () ->  meta.includeTargetURL() );
  check( "URL_FIELDNAME", () ->  meta.getTargetURLField() );
  check( "INCLUDE_MODULE_IN_OUTPUT", () ->  meta.includeModule() );
  check( "MODULE_FIELDNAME", () ->  meta.getModuleField() );
  check( "INCLUDE_DELETION_DATE_IN_OUTPUT", () ->  meta.includeDeletionDate() );
  check( "DELETION_DATE_FIELDNAME", () ->  meta.getDeletionDateField() );
  check( "INCLUDE_ROWNUM_IN_OUTPUT", () ->  meta.includeRowNumber() );
  check( "ROWNUM_FIELDNAME", () ->  meta.getRowNumberField() );
  check( "QUERY_CONDITION", () ->  meta.getCondition() );
  check( "LIMIT", () ->  meta.getRowLimit() );
  check( "USE_SPECIFIED_QUERY", () ->  meta.isSpecifyQuery() );
  check( "SPECIFY_QUERY", () ->  meta.getQuery() );
  check( "END_DATE", () ->  meta.getReadTo() );
  check( "START_DATE", () ->  meta.getReadFrom() );
  check( "QUERY_ALL", () ->  meta.isQueryAll() );
  checkStringToInt( "RETRIEVE", () ->  meta.getRecordsFilter(),
    SalesforceConnectionUtils.recordsFilterCode,
    new int[]{ RECORDS_FILTER_ALL, RECORDS_FILTER_UPDATED, RECORDS_FILTER_DELETED } );
  check( "NAME", () ->  meta.getInputFields()[0].getName() );
  check( "FIELD", () ->  meta.getInputFields()[0].getField() );
  check( "LENGTH", () ->  meta.getInputFields()[0].getLength() );
  check( "FORMAT", () ->  meta.getInputFields()[0].getFormat() );
  check( "PRECISION", () ->  meta.getInputFields()[0].getPrecision() );
  check( "CURRENCY", () ->  meta.getInputFields()[0].getCurrencySymbol() );
  check( "DECIMAL", () ->  meta.getInputFields()[0].getDecimalSymbol() );
  check( "GROUP", () ->  meta.getInputFields()[0].getGroupSymbol() );
  check( "REPEAT", () ->  meta.getInputFields()[0].isRepeated() );
  check( "ISIDLOOKUP", () ->  meta.getInputFields()[0].isIdLookup() );
  checkStringToInt( "TRIM_TYPE", () ->  meta.getInputFields()[0].getTrimType(),
    SalesforceInputField.trimTypeCode,
    new int[]{ TYPE_TRIM_NONE, TYPE_TRIM_LEFT, TYPE_TRIM_RIGHT, TYPE_TRIM_BOTH });
  int[] types = new int[]{
    ValueMetaInterface.TYPE_NONE,
    ValueMetaInterface.TYPE_NUMBER,
    ValueMetaInterface.TYPE_STRING,
    ValueMetaInterface.TYPE_DATE,
    ValueMetaInterface.TYPE_BOOLEAN,
    ValueMetaInterface.TYPE_INTEGER,
    ValueMetaInterface.TYPE_BIGNUMBER,
    ValueMetaInterface.TYPE_SERIALIZABLE,
    ValueMetaInterface.TYPE_BINARY,
    ValueMetaInterface.TYPE_TIMESTAMP,
    ValueMetaInterface.TYPE_INET
  };
  ValueMetaString valueMeta = new ValueMetaString();
  checkStringToInt("TYPE", () ->  meta.getInputFields()[0].getType(),
    valueMeta.typeCodes, types );
}
 
Example 16
Source File: ValueMetaBase.java    From pentaho-kettle with Apache License 2.0
/**
 * Clones the data. Normally, we don't have to do anything here, but just for arguments and safety, we do a little
 * extra work in case of binary blobs and Date objects. We should write a programmers manual later on to specify in
 * all clarity that "we always overwrite/replace values in the Object[] data rows, we never modify them" .
 *
 * @return a cloned data object if needed
 */
@Override
public Object cloneValueData( Object object ) throws KettleValueException {
  if ( object == null ) {
    return null;
  }

  if ( storageType == STORAGE_TYPE_NORMAL ) {
    switch ( getType() ) {
      case ValueMetaInterface.TYPE_STRING:
      case ValueMetaInterface.TYPE_NUMBER:
      case ValueMetaInterface.TYPE_INTEGER:
      case ValueMetaInterface.TYPE_BOOLEAN:
      case ValueMetaInterface.TYPE_BIGNUMBER: // primitive data types: we can only
        // overwrite these, not change them
        return object;

      case ValueMetaInterface.TYPE_DATE:
        return new Date( ( (Date) object ).getTime() ); // just to make sure: very
        // inexpensive too.

      case ValueMetaInterface.TYPE_BINARY:
        byte[] origin = (byte[]) object;
        byte[] target = new byte[origin.length];
        System.arraycopy( origin, 0, target, 0, origin.length );
        return target;

      case ValueMetaInterface.TYPE_SERIALIZABLE:
        // Let's not create a copy but simply return the same value.
        //
        return object;

      default:
        throw new KettleValueException( toString() + ": unable to make copy of value type: " + getType() );
    }
  } else {

    return object;

  }
}
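
Binary values get a real copy because a byte[] is mutable: handing the same array to two rows would let a later write leak into the "cloned" value. A plain-Java illustration of the difference the System.arraycopy() above makes:

import java.util.Arrays;

public class BinaryCloneSketch {
  public static void main( String[] args ) {
    byte[] original = { 1, 2, 3 };
    byte[] shared = original;                                   // no copy: same array
    byte[] cloned = Arrays.copyOf( original, original.length ); // what the TYPE_BINARY branch does

    original[0] = 9;
    System.out.println( shared[0] + " vs " + cloned[0] );       // prints "9 vs 1"
  }
}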
 
Example 17
Source File: LDAPInput.java    From pentaho-kettle with Apache License 2.0
private Object getAttributeValue( LDAPInputField field, Attribute attr, int i, Object outputRowData ) throws Exception {

    if ( field.getType() == ValueMetaInterface.TYPE_BINARY ) {
      // It's a binary field
      // no need to convert, just return the value as is
      try {
        return attr.get();
      } catch ( java.lang.ClassCastException e ) {
        return attr.get().toString().getBytes();
      }
    }

    String retval = null;
    if ( field.getReturnType() == LDAPInputField.FETCH_ATTRIBUTE_AS_BINARY
      && field.getType() == ValueMetaInterface.TYPE_STRING ) {
      // Convert byte[] to string
      return LDAPConnection.extractBytesAndConvertToString( attr, field.isObjectSid() );
    }

    // extract as string
    retval = extractString( attr );

    // DO Trimming!
    switch ( field.getTrimType() ) {
      case LDAPInputField.TYPE_TRIM_LEFT:
        retval = Const.ltrim( retval );
        break;
      case LDAPInputField.TYPE_TRIM_RIGHT:
        retval = Const.rtrim( retval );
        break;
      case LDAPInputField.TYPE_TRIM_BOTH:
        retval = Const.trim( retval );
        break;
      default:
        break;
    }

    // DO CONVERSIONS...
    //
    ValueMetaInterface targetValueMeta = data.outputRowMeta.getValueMeta( i );
    ValueMetaInterface sourceValueMeta = data.convertRowMeta.getValueMeta( i );
    return targetValueMeta.convertData( sourceValueMeta, retval );

  }
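
For a TYPE_BINARY field the attribute value is returned exactly as JNDI hands it over, which for attributes such as jpegPhoto or objectSid is usually already a byte[]. A small sketch of that raw access using plain JNDI classes (the attribute name and bytes are illustrative):

import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import javax.naming.directory.BasicAttribute;

public class LdapBinaryAttributeSketch {
  public static void main( String[] args ) throws NamingException {
    // a binary LDAP attribute, as a directory server would typically return it
    Attribute attr = new BasicAttribute( "jpegPhoto", new byte[] { 0x4A, 0x46, 0x49, 0x46 } );

    Object raw = attr.get(); // what the TYPE_BINARY branch above returns untouched
    System.out.println( raw instanceof byte[] ); // true
  }
}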
 
Example 18
Source File: NeoviewDatabaseMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval += fieldname + " ";
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "TIMESTAMP";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      retval += "CHAR(1)";
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
        fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        retval += "INTEGER NOT NULL PRIMARY KEY";
      } else {
        // Integer values...
        if ( precision == 0 ) {
          if ( length > 9 ) {
            if ( length <= 18 ) { // can hold max. 18
              retval += "NUMERIC(" + length + ")";
            } else {
              retval += "FLOAT";
            }
          } else {
            retval += "INTEGER";
          }
        } else {
          // Floating point values...
          // A double-precision floating-point number is accurate to approximately 15 decimal places.
          // +/- 2.2250738585072014e-308 through +/-1.7976931348623157e+308; stored in 8 byte
          // NUMERIC values are stored in less bytes, so we try to use them instead of a FLOAT:
          // 1 to 4 digits in 2 bytes, 5 to 9 digits in 4 bytes, 10 to 18 digits in 8 bytes
          if ( length <= 18 ) {
            retval += "NUMERIC(" + length;
            if ( precision > 0 ) {
              retval += ", " + precision;
            }
            retval += ")";
          } else {
            retval += "FLOAT";
          }
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      // for LOB support see Neoview_JDBC_T4_Driver_Prog_Ref_2.2.pdf
      if ( length > 0 ) {
        if ( length <= 4028 ) {
          retval += "VARCHAR(" + length + ")";
        } else if ( length <= 4036 ) {
          retval += "CHAR(" + length + ")"; // squeezing 8 bytes ;-)
        } else {
          retval += "CLOB"; // before we go to CLOB
        }
      } else {
        retval += "CHAR(1)";
      }
      break;
    case ValueMetaInterface.TYPE_BINARY:
      retval += "BLOB";
      break;
    default:
      retval += " UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
 
Example 19
Source File: FirebirdDatabaseMeta.java    From pentaho-kettle with Apache License 2.0
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    if ( Const.indexOfString( fieldname, getReservedWords() ) >= 0 ) {
      retval += getStartQuote() + fieldname + getEndQuote();
    } else {
      retval += fieldname + " ";
    }
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "TIMESTAMP";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      if ( supportsBooleanDataType() ) {
        retval += "BIT";
      } else {
        retval += "CHAR(1)";
      }
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
        fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        retval += "BIGINT NOT NULL PRIMARY KEY";
      } else {
        if ( length > 0 ) {
          if ( precision > 0 || length > 18 ) {
            retval += "DECIMAL(" + length;
            if ( precision > 0 ) {
              retval += ", " + precision;
            }
            retval += ")";
          } else {
            if ( length > 9 ) {
              retval += "BIGINT";
            } else {
              if ( length < 5 ) {
                retval += "SMALLINT";
              } else {
                retval += "INTEGER";
              }
            }
          }
        } else {
          retval += "DOUBLE";
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length < 32720 ) {
        retval += "VARCHAR";
        if ( length > 0 ) {
          retval += "(" + length + ")";
        } else {
          retval += "(8000)"; // Maybe use some default DB String length?
        }
      } else {
        retval += "BLOB SUB_TYPE TEXT";
      }
      break;
    case ValueMetaInterface.TYPE_BINARY:
      retval += "BLOB";
      break;
    default:
      retval += "UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
 
Example 20
Source File: AccessOutputMeta.java    From pentaho-kettle with Apache License 2.0
public static Object[] createObjectsForRow( RowMetaInterface rowMeta, Object[] rowData ) throws KettleValueException {
  Object[] values = new Object[rowMeta.size()];
  for ( int i = 0; i < rowMeta.size(); i++ ) {
    ValueMetaInterface valueMeta = rowMeta.getValueMeta( i );
    Object valueData = rowData[i];

    // Prevent a NullPointerException below
    if ( valueData == null || valueMeta == null ) {
      values[i] = null;
      continue;
    }

    int length = valueMeta.getLength();

    switch ( valueMeta.getType() ) {
      case ValueMetaInterface.TYPE_INTEGER:
        if ( length < 3 ) {
          values[i] = new Byte( valueMeta.getInteger( valueData ).byteValue() );
        } else {
          if ( length < 5 ) {
            values[i] = new Short( valueMeta.getInteger( valueData ).shortValue() );
          } else {
            values[i] = valueMeta.getInteger( valueData );
          }
        }
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        values[i] = valueMeta.getNumber( valueData );
        break;
      case ValueMetaInterface.TYPE_DATE:
        values[i] = valueMeta.getDate( valueData );
        break;
      case ValueMetaInterface.TYPE_STRING:
        values[i] = valueMeta.getString( valueData );
        break;
      case ValueMetaInterface.TYPE_BINARY:
        values[i] = valueMeta.getBinary( valueData );
        break;
      case ValueMetaInterface.TYPE_BOOLEAN:
        values[i] = valueMeta.getBoolean( valueData );
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        values[i] = valueMeta.getNumber( valueData );
        break;
      default:
        break;
    }
  }
  return values;
}