Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#TYPE_INTEGER

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#TYPE_INTEGER. You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: MongodbInputDiscoverFieldsImpl.java    From pentaho-mongodb-plugin with Apache License 2.0 6 votes vote down vote up
/**
 * Maps a raw value pulled from a MongoDB document to the matching Kettle type
 * constant.
 *
 * @param fieldValue the MongoDB field value (may be null)
 * @return one of the ValueMetaInterface.TYPE_* constants; TYPE_STRING for null
 *         or unrecognized classes
 */
protected static int mongoToKettleType( Object fieldValue ) {
  if ( fieldValue == null ) {
    return ValueMetaInterface.TYPE_STRING;
  }

  if ( fieldValue instanceof Symbol || fieldValue instanceof String || fieldValue instanceof Code
        || fieldValue instanceof ObjectId || fieldValue instanceof MinKey || fieldValue instanceof MaxKey ) {
    return ValueMetaInterface.TYPE_STRING;
  } else if ( fieldValue instanceof Date ) {
    return ValueMetaInterface.TYPE_DATE;
  } else if ( fieldValue instanceof Number ) {
    // Integral wrapper types are integers. Checking the class directly fixes a
    // bug where Long values outside the 32-bit int range (e.g. 3000000000L)
    // failed Integer.parseInt() and were wrongly reported as TYPE_NUMBER.
    if ( fieldValue instanceof Integer || fieldValue instanceof Long
          || fieldValue instanceof Short || fieldValue instanceof Byte ) {
      return ValueMetaInterface.TYPE_INTEGER;
    }
    // Other Number subclasses: keep the original parse-based heuristic so that
    // e.g. an integral decimal value still maps to TYPE_INTEGER.
    try {
      Integer.parseInt( fieldValue.toString() );
      return ValueMetaInterface.TYPE_INTEGER;
    } catch ( NumberFormatException e ) {
      return ValueMetaInterface.TYPE_NUMBER;
    }
  } else if ( fieldValue instanceof Binary ) {
    return ValueMetaInterface.TYPE_BINARY;
  } else if ( fieldValue instanceof BSONTimestamp ) {
    return ValueMetaInterface.TYPE_INTEGER;
  }

  return ValueMetaInterface.TYPE_STRING;
}
 
Example 2
Source File: DataSetCsvGroup.java    From pentaho-pdi-dataset with Apache License 2.0 6 votes vote down vote up
/**
 * Assigns a default conversion mask to every value in the row metadata that
 * does not already have one: integers get "0", numbers "0.#" and dates a
 * millisecond-precision timestamp pattern. Other types are left untouched.
 */
private static void setValueFormats( RowMetaInterface rowMeta ) {
  for ( ValueMetaInterface valueMeta : rowMeta.getValueMetaList() ) {
    if ( !StringUtils.isEmpty( valueMeta.getConversionMask() ) ) {
      continue; // an explicit mask is never overridden
    }
    int type = valueMeta.getType();
    if ( type == ValueMetaInterface.TYPE_INTEGER ) {
      valueMeta.setConversionMask( "0" );
    } else if ( type == ValueMetaInterface.TYPE_NUMBER ) {
      valueMeta.setConversionMask( "0.#" );
    } else if ( type == ValueMetaInterface.TYPE_DATE ) {
      valueMeta.setConversionMask( "yyyyMMdd-HHmmss.SSS" );
    }
  }
}
 
Example 3
Source File: TableView.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Returns the selectable combo values for a table cell. For format columns
 * the choices depend on the Kettle type entered in the row's type column;
 * every other column falls back to its statically configured values.
 */
private String[] getComboValues( TableItem row, ColumnInfo colinfo ) {
  if ( colinfo.getType() != ColumnInfo.COLUMN_TYPE_FORMAT ) {
    return colinfo.getComboValues();
  }
  String typeName = row.getText( colinfo.getFieldTypeColumn() );
  int type = ValueMetaFactory.getIdForValueMeta( typeName );
  if ( type == ValueMetaInterface.TYPE_DATE ) {
    return Const.getDateFormats();
  }
  if ( type == ValueMetaInterface.TYPE_INTEGER
      || type == ValueMetaInterface.TYPE_BIGNUMBER
      || type == ValueMetaInterface.TYPE_NUMBER ) {
    return Const.getNumberFormats();
  }
  if ( type == ValueMetaInterface.TYPE_STRING ) {
    return Const.getConversionFormats();
  }
  return new String[0];
}
 
Example 4
Source File: CalculatorUnitTest.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a human-readable "TypeName(id)" label for a numeric Kettle data type
 * id, used in test diagnostics; unknown ids render as "?(id)".
 */
private String getKettleTypeName( int kettleNumberDataType ) {
  final String baseName;
  if ( kettleNumberDataType == ValueMetaInterface.TYPE_BIGNUMBER ) {
    baseName = "BigNumber";
  } else if ( kettleNumberDataType == ValueMetaInterface.TYPE_NUMBER ) {
    baseName = "Number";
  } else if ( kettleNumberDataType == ValueMetaInterface.TYPE_INTEGER ) {
    baseName = "Integer";
  } else {
    baseName = "?";
  }
  return baseName + "(" + kettleNumberDataType + ")";
}
 
Example 5
Source File: CassandraColumnMetaData.java    From learning-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Maps a Kettle column type to the CQL type name used in CREATE COLUMN FAMILY
 * statements. CQL type names are used (rather than fully qualified
 * org.apache.cassandra.db.marshal class names) because, although class names
 * are accepted for column definitions, using them for the key always results
 * in BytesType being set for the key.
 *
 * @param vm the ValueMetaInterface for the Kettle column
 * @return the corresponding CQL type name; "blob" for binary, serializable
 *         and any unrecognized type
 */
public static String getCQLTypeForValueMeta(ValueMetaInterface vm) {
  int kettleType = vm.getType();
  if (kettleType == ValueMetaInterface.TYPE_STRING) {
    return "varchar";
  }
  if (kettleType == ValueMetaInterface.TYPE_BIGNUMBER) {
    return "decimal";
  }
  if (kettleType == ValueMetaInterface.TYPE_BOOLEAN) {
    return "boolean";
  }
  if (kettleType == ValueMetaInterface.TYPE_INTEGER) {
    return "bigint";
  }
  if (kettleType == ValueMetaInterface.TYPE_NUMBER) {
    return "double";
  }
  if (kettleType == ValueMetaInterface.TYPE_DATE) {
    return "timestamp";
  }
  // TYPE_BINARY, TYPE_SERIALIZABLE and everything unrecognized become blobs.
  return "blob";
}
 
Example 6
Source File: XsdType.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Translates an XSD type name (case-insensitive) into the matching Kettle
 * type constant. Date, time and dateTime all collapse to TYPE_DATE; integer
 * and short both map to TYPE_INTEGER.
 *
 * @param aXsdType the XSD type name, may be null
 * @return the matching ValueMetaInterface.TYPE_* constant; TYPE_NONE for
 *         null or unrecognized names
 */
public static int xsdTypeToKettleType( String aXsdType ) {
  if ( aXsdType == null ) {
    return ValueMetaInterface.TYPE_NONE;
  }
  if ( aXsdType.equalsIgnoreCase( DATE )
      || aXsdType.equalsIgnoreCase( TIME )
      || aXsdType.equalsIgnoreCase( DATE_TIME ) ) {
    return ValueMetaInterface.TYPE_DATE;
  }
  if ( aXsdType.equalsIgnoreCase( INTEGER )
      || aXsdType.equalsIgnoreCase( INTEGER_DESC )
      || aXsdType.equalsIgnoreCase( SHORT ) ) {
    return ValueMetaInterface.TYPE_INTEGER;
  }
  if ( aXsdType.equalsIgnoreCase( BOOLEAN ) ) {
    return ValueMetaInterface.TYPE_BOOLEAN;
  }
  if ( aXsdType.equalsIgnoreCase( STRING ) ) {
    return ValueMetaInterface.TYPE_STRING;
  }
  if ( aXsdType.equalsIgnoreCase( DOUBLE ) ) {
    return ValueMetaInterface.TYPE_NUMBER;
  }
  if ( aXsdType.equalsIgnoreCase( BINARY ) ) {
    return ValueMetaInterface.TYPE_BINARY;
  }
  if ( aXsdType.equalsIgnoreCase( DECIMAL ) ) {
    return ValueMetaInterface.TYPE_BIGNUMBER;
  }
  // NOTE(review): the original comment here said "map it to a String", but the
  // code has always returned TYPE_NONE for unrecognized names; preserved as-is.
  return ValueMetaInterface.TYPE_NONE;
}
 
Example 7
Source File: ParquetConverter.java    From pentaho-hadoop-shims with Apache License 2.0 5 votes vote down vote up
/**
 * Appends a value meta of the requested PDI type, named pentahoFieldName,
 * to the fields row. Unmapped type ids are silently ignored.
 */
private void addValueMeta( int pdiType, String pentahoFieldName ) {
  ValueMetaInterface valueMeta;
  switch ( pdiType ) {
    case ValueMetaInterface.TYPE_BINARY:
      valueMeta = new ValueMetaBinary( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_BIGNUMBER:
      valueMeta = new ValueMetaBigNumber( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      valueMeta = new ValueMetaBoolean( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_DATE:
      valueMeta = new ValueMetaDate( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_INET:
      valueMeta = new ValueMetaInternetAddress( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_INTEGER:
      valueMeta = new ValueMetaInteger( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_NUMBER:
      valueMeta = new ValueMetaNumber( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_STRING:
      valueMeta = new ValueMetaString( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_TIMESTAMP:
      valueMeta = new ValueMetaTimestamp( pentahoFieldName );
      break;
    default:
      valueMeta = null;
      break;
  }
  if ( valueMeta != null ) {
    fields.addValueMeta( valueMeta );
  }
}
 
Example 8
Source File: RowMetaAndData.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Checks whether the named column's value in this row converts to null via
 * its own value meta getter.
 *
 * @param valueName the column name to look up
 * @return true when the stored value converts to null
 * @throws KettleValueException when the column is unknown or has an
 *         unsupported type
 */
public boolean isEmptyValue( String valueName ) throws KettleValueException {
  int index = rowMeta.indexOfValue( valueName );
  if ( index < 0 ) {
    throw new KettleValueException( "Unknown column '" + valueName + "'" );
  }

  ValueMetaInterface valueMeta = rowMeta.getValueMeta( index );
  int sourceType = valueMeta.getType();
  // STRING and INET both go through the string getter.
  if ( sourceType == ValueMetaInterface.TYPE_STRING || sourceType == ValueMetaInterface.TYPE_INET ) {
    return rowMeta.getString( data, index ) == null;
  }
  if ( sourceType == ValueMetaInterface.TYPE_BOOLEAN ) {
    return rowMeta.getBoolean( data, index ) == null;
  }
  if ( sourceType == ValueMetaInterface.TYPE_INTEGER ) {
    return rowMeta.getInteger( data, index ) == null;
  }
  if ( sourceType == ValueMetaInterface.TYPE_NUMBER ) {
    return rowMeta.getNumber( data, index ) == null;
  }
  if ( sourceType == ValueMetaInterface.TYPE_BIGNUMBER ) {
    return rowMeta.getBigNumber( data, index ) == null;
  }
  if ( sourceType == ValueMetaInterface.TYPE_BINARY ) {
    return rowMeta.getBinary( data, index ) == null;
  }
  // DATE and TIMESTAMP both go through the date getter.
  if ( sourceType == ValueMetaInterface.TYPE_DATE || sourceType == ValueMetaInterface.TYPE_TIMESTAMP ) {
    return rowMeta.getDate( data, index ) == null;
  }
  throw new KettleValueException( "Unknown source type: " + valueMeta.getTypeDesc() );
}
 
Example 9
Source File: DatabaseLookupUTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a DatabaseLookupData wired to a mocked database that returns a
 * single { 1L } row, then runs one processRow pass. The allEquals flag
 * selects between an equality and a less-than lookup condition.
 */
private DatabaseLookupData getCreatedData( boolean allEquals ) throws Exception {
  Database database = mock( Database.class );
  when( database.getRows( anyString(), anyInt() ) )
    .thenReturn( Collections.singletonList( new Object[] { 1L } ) );

  RowMeta lookupReturnMeta = new RowMeta();
  lookupReturnMeta.addValueMeta( new ValueMetaInteger() );
  when( database.getReturnRowMeta() ).thenReturn( lookupReturnMeta );

  DatabaseLookupMeta meta = createTestMeta();
  DatabaseLookupData data = new DatabaseLookupData();

  DatabaseLookup step = createSpiedStep( database, mockHelper, meta );
  step.init( meta, data );

  data.db = database;
  data.keytypes = new int[] { ValueMetaInterface.TYPE_INTEGER };
  data.allEquals = allEquals;
  data.conditions = new int[] {
    allEquals ? DatabaseLookupMeta.CONDITION_EQ : DatabaseLookupMeta.CONDITION_LT };
  step.processRow( meta, data );

  return data;
}
 
Example 10
Source File: ValueMetaBaseTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void testCompareIntegers() throws KettleValueException {
  // Verifies Long comparison semantics of an integer value meta, including
  // extremes near Long.MAX_VALUE/MIN_VALUE, nulls, and descending sort order.
  // Autoboxed literals replace the Long(long) constructor, deprecated since Java 9.
  ValueMetaBase intMeta = new ValueMetaBase( "int", ValueMetaInterface.TYPE_INTEGER );
  Long int1 = 6223372036854775804L;
  Long int2 = -6223372036854775804L;
  assertEquals( 1, intMeta.compare( int1, int2 ) );
  assertEquals( -1, intMeta.compare( int2, int1 ) );
  assertEquals( 0, intMeta.compare( int1, int1 ) );
  assertEquals( 0, intMeta.compare( int2, int2 ) );

  int1 = 9223372036854775804L;
  int2 = -9223372036854775804L;
  assertEquals( 1, intMeta.compare( int1, int2 ) );
  assertEquals( -1, intMeta.compare( int2, int1 ) );
  assertEquals( 0, intMeta.compare( int1, int1 ) );
  assertEquals( 0, intMeta.compare( int2, int2 ) );

  int1 = 6223372036854775804L;
  int2 = -9223372036854775804L;
  assertEquals( 1, intMeta.compare( int1, int2 ) );
  assertEquals( -1, intMeta.compare( int2, int1 ) );
  assertEquals( 0, intMeta.compare( int1, int1 ) );

  int1 = 9223372036854775804L;
  int2 = -6223372036854775804L;
  assertEquals( 1, intMeta.compare( int1, int2 ) );
  assertEquals( -1, intMeta.compare( int2, int1 ) );
  assertEquals( 0, intMeta.compare( int1, int1 ) );

  // Null sorts first when ascending, last when sorted descending.
  int1 = null;
  int2 = 6223372036854775804L;
  assertEquals( -1, intMeta.compare( int1, int2 ) );
  intMeta.setSortedDescending( true );
  assertEquals( 1, intMeta.compare( int1, int2 ) );

}
 
Example 11
Source File: ValueMetaBaseTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void testNullHashCodes() throws Exception {
  // hashCode( null ) must produce a distinct power-of-two bit per value type
  // (TYPE_NONE hashes to 0). Arguments are passed in JUnit's (expected, actual)
  // order — the original had them reversed, which makes failure messages
  // misleading. Local renamed: it is not specific to strings.
  ValueMetaBase meta = new ValueMetaBase( );

  meta.type = ValueMetaInterface.TYPE_BOOLEAN;
  assertEquals( 0 ^ 1, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_DATE;
  assertEquals( 0 ^ 2, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_NUMBER;
  assertEquals( 0 ^ 4, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_STRING;
  assertEquals( 0 ^ 8, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_INTEGER;
  assertEquals( 0 ^ 16, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_BIGNUMBER;
  assertEquals( 0 ^ 32, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_BINARY;
  assertEquals( 0 ^ 64, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_TIMESTAMP;
  assertEquals( 0 ^ 128, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_INET;
  assertEquals( 0 ^ 256, meta.hashCode( null ) );

  meta.type = ValueMetaInterface.TYPE_NONE;
  assertEquals( 0, meta.hashCode( null ) );
}
 
Example 12
Source File: DatabaseLookupUTest.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
@Test
public void createsReadDefaultCache_AndUsesOnlyNeededFieldsFromMeta() throws Exception {
  // Mocked database returns two single-column rows; after one processRow pass
  // both rows must be retrievable from the step's read-default cache.
  Database database = mock( Database.class );
  when( database.getRows( anyString(), anyInt() ) )
    .thenReturn( Arrays.asList( new Object[] { 1L }, new Object[] { 2L } ) );

  RowMeta lookupReturnMeta = new RowMeta();
  lookupReturnMeta.addValueMeta( new ValueMetaInteger() );
  lookupReturnMeta.addValueMeta( new ValueMetaInteger() );
  when( database.getReturnRowMeta() ).thenReturn( lookupReturnMeta );

  DatabaseLookupMeta meta = createTestMeta();
  DatabaseLookupData data = new DatabaseLookupData();

  DatabaseLookup step = createSpiedStep( database, mockHelper, meta );
  step.init( meta, data );

  data.db = database;
  data.keytypes = new int[] { ValueMetaInterface.TYPE_INTEGER };
  data.allEquals = true;
  data.conditions = new int[] { DatabaseLookupMeta.CONDITION_EQ };

  step.processRow( meta, data );

  data.lookupMeta = new RowMeta();
  data.lookupMeta.addValueMeta( new ValueMetaInteger() );

  assertNotNull( data.cache.getRowFromCache( data.lookupMeta, new Object[] { 1L } ) );
  assertNotNull( data.cache.getRowFromCache( data.lookupMeta, new Object[] { 2L } ) );
}
 
Example 13
Source File: CiviStep.java    From civicrm-data-integration with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Converts the raw string representation of a CiviCRM field into the Java
 * object matching the field's Kettle meta type (unknown fields default to
 * string). Returns null for null/empty input or when conversion fails.
 *
 * @param field the CiviCRM field name used to look up the meta type
 * @param object the raw string value to convert
 * @return the converted value, or null on empty input or conversion failure
 */
protected Object getObjectValue(String field, String object) {
    try {
        if (object == null || object.isEmpty()) {
            return null;
        }

        CiviField cf = ((CiviMeta) civiMeta).getCiviCrmListingFields().get(field);

        // Unknown fields fall back to plain string handling.
        int metaType = ValueMetaInterface.TYPE_STRING;
        if (cf != null) {
            metaType = cf.getMetaInterfaceType();
        }

        switch (metaType) {
        case ValueMetaInterface.TYPE_INTEGER:
            return Long.parseLong(object);
        case ValueMetaInterface.TYPE_STRING:
            return object;
        case ValueMetaInterface.TYPE_NUMBER:
            return Double.parseDouble(object);
        case ValueMetaInterface.TYPE_DATE:
            SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
            return formatter.parse(object);
        case ValueMetaInterface.TYPE_BIGNUMBER:
            return new BigDecimal(object);
        case ValueMetaInterface.TYPE_BOOLEAN:
            return Boolean.parseBoolean(object);
        case ValueMetaInterface.TYPE_BINARY:
            // Message fixed: the original copy-pasted "... to integers" here.
            throw new KettleValueException(toString() + " : I don't know how to convert binary values.");
        case ValueMetaInterface.TYPE_SERIALIZABLE:
            throw new KettleValueException(toString() + " : I don't know how to convert serializable values.");
        default:
            throw new KettleValueException(toString() + " : Unknown type " + metaType + " specified.");
        }
    } catch (Exception e) {
        // NOTE(review): conversion failures are swallowed — callers receive
        // null and the error only reaches stderr. Consider proper logging.
        e.printStackTrace();
    }
    return null;
}
 
Example 14
Source File: BeamBQOutputTransform.java    From kettle-beam with Apache License 2.0 4 votes vote down vote up
@Override public PDone expand( PCollection<KettleRow> input ) {

    try {
      // Only initialize once on this node/vm
      //
      BeamKettle.init( stepPluginClasses, xpPluginClasses );

      // Inflate the metadata on the node where this is running...
      //
      RowMetaInterface rowMeta = JsonRowMeta.fromJson( rowMetaJson );


      // Which table do we write to?
      //
      TableReference tableReference = new TableReference();
      if ( StringUtils.isNotEmpty( projectId ) ) {
        tableReference.setProjectId( projectId );
      }
      tableReference.setDatasetId( datasetId );
      tableReference.setTableId( tableId );

      // Derive the BigQuery schema from the Kettle row metadata. Only the five
      // Kettle types below are mapped; any other type aborts pipeline setup.
      TableSchema tableSchema = new TableSchema();
      List<TableFieldSchema> schemaFields = new ArrayList<>();
      for ( ValueMetaInterface valueMeta : rowMeta.getValueMetaList() ) {
        TableFieldSchema schemaField = new TableFieldSchema();
        schemaField.setName( valueMeta.getName() );
        switch(valueMeta.getType()){
          case ValueMetaInterface.TYPE_STRING: schemaField.setType( "STRING" ); break;
          case ValueMetaInterface.TYPE_INTEGER: schemaField.setType( "INTEGER" ); break;
          case ValueMetaInterface.TYPE_DATE: schemaField.setType( "DATETIME" ); break;
          case ValueMetaInterface.TYPE_BOOLEAN: schemaField.setType( "BOOLEAN" ); break;
          case ValueMetaInterface.TYPE_NUMBER: schemaField.setType( "FLOAT" ); break;
          default:
            throw new RuntimeException( "Conversion from Kettle value "+valueMeta.toString()+" to BigQuery TableRow isn't supported yet" );
        }
        schemaFields.add(schemaField);
      }
      tableSchema.setFields( schemaFields );

      // Serializable row-format function: converts each KettleRow to a TableRow
      // on the workers, reconstructing metadata from the JSON snapshot.
      SerializableFunction<KettleRow, TableRow> formatFunction = new KettleToBQTableRowFn( stepname, rowMetaJson, stepPluginClasses, xpPluginClasses );

      BigQueryIO.Write.CreateDisposition createDisposition;
      if (createIfNeeded) {
        createDisposition = BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED;
      }  else {
        createDisposition = BigQueryIO.Write.CreateDisposition.CREATE_NEVER;
      }

      BigQueryIO.Write.WriteDisposition writeDisposition;
      if (truncateTable) {
        // NOTE(review): truncateTable selects WRITE_APPEND, not WRITE_TRUNCATE —
        // this looks like a bug (the table would be appended to rather than
        // truncated); confirm intent before relying on this flag.
        writeDisposition = BigQueryIO.Write.WriteDisposition.WRITE_APPEND;
      } else {
        if (failIfNotEmpty) {
          writeDisposition = BigQueryIO.Write.WriteDisposition.WRITE_EMPTY;
        } else {
          writeDisposition = BigQueryIO.Write.WriteDisposition.WRITE_APPEND;
        }
      }

      BigQueryIO.Write<KettleRow> bigQueryWrite = BigQueryIO
        .<KettleRow>write()
        .to( tableReference )
        .withSchema( tableSchema )
        .withCreateDisposition( createDisposition )
        .withWriteDisposition( writeDisposition )
        .withFormatFunction( formatFunction );

      // TODO: pass the results along the way at some point
      //
      input.apply( stepname, bigQueryWrite );

      // End of the line
      //
      return PDone.in( input.getPipeline() );

    } catch ( Exception e ) {
      numErrors.inc();
      LOG.error( "Error in Beam BigQuery output transform", e );
      throw new RuntimeException( "Error in Beam BigQuery output transform", e );
    }
  }
 
Example 15
Source File: MSAccessDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Generates the MS Access DDL fragment describing a single field. Numeric
 * key fields become COUNTER/LONG primary keys; other numerics map to
 * DOUBLE/LONG/INTEGER by precision and length; strings become TEXT or MEMO.
 */
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  StringBuilder definition = new StringBuilder();
  if ( addFieldName ) {
    definition.append( fieldname ).append( ' ' );
  }

  switch ( v.getType() ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      definition.append( "DATETIME" );
      break;
    // Move back to Y/N for bug - [# 1538] Repository on MS ACCESS: error creating repository
    case ValueMetaInterface.TYPE_BOOLEAN:
      definition.append( supportsBooleanDataType() ? "BIT" : "CHAR(1)" );
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER: {
      boolean isKeyField = fieldname.equalsIgnoreCase( tk ) // Technical key
        || fieldname.equalsIgnoreCase( pk ); // Primary key
      if ( isKeyField ) {
        definition.append( useAutoinc ? "COUNTER PRIMARY KEY" : "LONG PRIMARY KEY" );
      } else if ( precision != 0 || length > 9 ) {
        definition.append( "DOUBLE" );
      } else if ( length > 5 ) {
        definition.append( "LONG" );
      } else {
        definition.append( "INTEGER" );
      }
      break;
    }
    case ValueMetaInterface.TYPE_STRING:
      if ( length <= 0 ) {
        definition.append( "TEXT" );
      } else if ( length < 256 ) {
        definition.append( "TEXT(" ).append( length ).append( ')' );
      } else {
        definition.append( "MEMO" );
      }
      break;
    case ValueMetaInterface.TYPE_BINARY:
      definition.append( " LONGBINARY" );
      break;
    default:
      definition.append( " UNKNOWN" );
      break;
  }

  if ( addCr ) {
    definition.append( Const.CR );
  }

  return definition.toString();
}
 
Example 16
Source File: PostgreSQLDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Generates the PostgreSQL DDL fragment for a single field. Technical and
 * primary key numeric fields become BIGSERIAL; other numerics map to
 * NUMERIC/BIGINT/INTEGER/SMALLINT/DOUBLE PRECISION depending on length and
 * precision; strings become VARCHAR or TEXT.
 */
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String fieldname = v.getName();
  if ( v.getLength() == DatabaseMeta.CLOB_LENGTH ) {
    // CLOB-sized strings are clamped to the maximum text field length.
    v.setLength( getMaxTextFieldLength() );
  }
  int length = v.getLength();
  int precision = v.getPrecision();

  StringBuilder definition = new StringBuilder();
  if ( addFieldName ) {
    definition.append( fieldname ).append( ' ' );
  }

  switch ( v.getType() ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      definition.append( "TIMESTAMP" );
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      definition.append( supportsBooleanDataType() ? "BOOLEAN" : "CHAR(1)" );
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) // Technical key
          || fieldname.equalsIgnoreCase( pk ) ) { // Primary key
        definition.append( "BIGSERIAL" );
      } else if ( length <= 0 ) {
        definition.append( "DOUBLE PRECISION" );
      } else if ( precision > 0 || length > 18 ) {
        // Numeric(Precision, Scale): Precision = total length; Scale = decimal places
        definition.append( "NUMERIC(" ).append( length + precision )
          .append( ", " ).append( precision ).append( ')' );
      } else if ( length > 9 ) {
        definition.append( "BIGINT" );
      } else if ( length < 5 ) {
        definition.append( "SMALLINT" );
      } else {
        definition.append( "INTEGER" );
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length < 1 || length >= DatabaseMeta.CLOB_LENGTH ) {
        definition.append( "TEXT" );
      } else {
        definition.append( "VARCHAR(" ).append( length ).append( ')' );
      }
      break;
    default:
      definition.append( " UNKNOWN" );
      break;
  }

  if ( addCr ) {
    definition.append( Const.CR );
  }

  return definition.toString();
}
 
Example 17
Source File: VerticaDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Generates the Vertica DDL fragment for a single field. Precision is not
 * used by the Vertica type mapping.
 */
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String fieldname = v.getName();
  int length = v.getLength();

  String typeDefinition;
  switch ( v.getType() ) {
    case ValueMetaInterface.TYPE_DATE:
    case ValueMetaInterface.TYPE_TIMESTAMP:
      typeDefinition = "TIMESTAMP";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      typeDefinition = "BOOLEAN";
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      typeDefinition = "FLOAT";
      break;
    case ValueMetaInterface.TYPE_INTEGER:
      typeDefinition = "INTEGER";
      break;
    case ValueMetaInterface.TYPE_STRING:
      typeDefinition = ( length < 1 ) ? "VARCHAR" : "VARCHAR(" + length + ")";
      break;
    case ValueMetaInterface.TYPE_BINARY:
      typeDefinition = ( length < 1 ) ? "VARBINARY" : "VARBINARY(" + length + ")";
      break;
    default:
      typeDefinition = " UNKNOWN";
      break;
  }

  String retval = addFieldName ? fieldname + " " + typeDefinition : typeDefinition;
  if ( addCr ) {
    retval += Const.CR;
  }
  return retval;
}
 
Example 18
Source File: GoogleBigQueryDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Generates the Google BigQuery DDL fragment for a single field. Numerics
 * map to INT64 (zero precision) or FLOAT64, with NOT NULL added for
 * technical/primary key fields.
 */
@Override public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                            boolean addFieldName, boolean addCr ) {
  String fieldname = v.getName();
  int precision = v.getPrecision();

  StringBuilder definition = new StringBuilder();
  if ( addFieldName ) {
    definition.append( fieldname ).append( ' ' );
  }

  switch ( v.getType() ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
      definition.append( "TIMESTAMP" );
      break;

    case ValueMetaInterface.TYPE_DATE:
      definition.append( "DATE" );
      break;

    case ValueMetaInterface.TYPE_BOOLEAN:
      definition.append( "BOOL" );
      break;

    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      definition.append( precision == 0 ? "INT64" : "FLOAT64" );
      if ( fieldname.equalsIgnoreCase( tk )
        || fieldname.equalsIgnoreCase( pk ) ) {
        definition.append( " NOT NULL" );
      }
      break;

    case ValueMetaInterface.TYPE_STRING:
      definition.append( "STRING" );
      break;

    case ValueMetaInterface.TYPE_BINARY:
      definition.append( "BYTES" );
      break;

    default:
      definition.append( " UNKNOWN" );
      break;
  }

  if ( addCr ) {
    definition.append( Const.CR );
  }

  return definition.toString();
}
 
Example 19
Source File: NeoviewDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * This method allows a database dialect to convert database specific data types to Kettle data types.
 *
 * @param rs
 *          The result set to use
 * @param val
 *          The description of the value to retrieve
 * @param i
 *          the index on which we need to retrieve the value, 0-based.
 * @return The correctly converted Kettle data type corresponding to the valueMeta description.
 * @throws KettleDatabaseException
 *           when the value cannot be retrieved from the result set
 */
@Override
public Object getValueFromResultSet( ResultSet rs, ValueMetaInterface val, int i ) throws KettleDatabaseException {
  Object data = null;

  try {
    switch ( val.getType() ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
        data = Boolean.valueOf( rs.getBoolean( i + 1 ) );
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        data = new Double( rs.getDouble( i + 1 ) );
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        data = rs.getBigDecimal( i + 1 );
        break;
      case ValueMetaInterface.TYPE_INTEGER:
        data = Long.valueOf( rs.getLong( i + 1 ) );
        break;
      case ValueMetaInterface.TYPE_STRING:
        // Binary-stored strings are fetched as raw bytes; otherwise as String.
        if ( val.isStorageBinaryString() ) {
          data = rs.getBytes( i + 1 );
        } else {
          data = rs.getString( i + 1 );
        }
        break;
      case ValueMetaInterface.TYPE_BINARY:
        // Prefer Blob access when the driver supports it; a null Blob stays null.
        if ( supportsGetBlob() ) {
          Blob blob = rs.getBlob( i + 1 );
          if ( blob != null ) {
            data = blob.getBytes( 1L, (int) blob.length() );
          } else {
            data = null;
          }
        } else {
          data = rs.getBytes( i + 1 );
        }
        break;
      case ValueMetaInterface.TYPE_TIMESTAMP:
      case ValueMetaInterface.TYPE_DATE:
        if ( val.getOriginalColumnType() == java.sql.Types.TIME ) {
          // Neoview can not handle getDate / getTimestamp for a Time column
          data = rs.getTime( i + 1 );
          break; // Time is a subclass of java.util.Date, the default date
                 // will be 1970-01-01
        } else if ( val.getPrecision() != 1 && supportsTimeStampToDateConversion() ) {
          data = rs.getTimestamp( i + 1 );
          break; // Timestamp extends java.util.Date
        } else {
          data = rs.getDate( i + 1 );
          break;
        }
      default:
        break;
    }
    // JDBC getters return primitive defaults for SQL NULL; normalize to null.
    if ( rs.wasNull() ) {
      data = null;
    }
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Unable to get value '"
      + val.toStringMeta() + "' from database resultset, index " + i, e );
  }

  return data;
}
 
Example 20
Source File: CacheDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval += fieldname + " ";
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "TIMESTAMP";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      retval += "CHAR(1)";
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) ) { // Technical & primary key : see at bottom
        retval += "DECIMAL";
      } else {
        if ( length < 0 || precision < 0 ) {
          retval += "DOUBLE";
        } else if ( precision > 0 || length > 9 ) {
          retval += "DECIMAL(" + length;
          if ( precision > 0 ) {
            retval += ", " + precision;
          }
          retval += ")";
        } else {
          // Precision == 0 && length<=9
          retval += "INT";
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING: // CLOBs are just VARCHAR in the Cache database: can be very large!
      retval += "VARCHAR";
      if ( length > 0 ) {
        retval += "(" + length + ")";
      }
      break;
    default:
      retval += " UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}