Java Code Examples for org.pentaho.di.core.row.ValueMetaInterface#TYPE_DATE

The following examples show how to use org.pentaho.di.core.row.ValueMetaInterface#TYPE_DATE. You can vote up the examples you find useful or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: InlineEtlQueryExecutor.java    From pentaho-metadata with GNU Lesser General Public License v2.1 6 votes vote down vote up
/**
 * Map a metadata {@link DataType} onto the matching Kettle row type constant.
 *
 * @param type the metadata data type to translate
 * @return the corresponding {@code ValueMetaInterface.TYPE_*} constant;
 *         UNKNOWN, URL and STRING (and any unmapped type) all become {@code TYPE_STRING}
 */
private int convertType( DataType type ) {
  final int kettleType;
  switch ( type ) {
    case DATE:
      kettleType = ValueMetaInterface.TYPE_DATE;
      break;
    case BOOLEAN:
      kettleType = ValueMetaInterface.TYPE_BOOLEAN;
      break;
    case NUMERIC:
      kettleType = ValueMetaInterface.TYPE_NUMBER;
      break;
    case BINARY:
    case IMAGE:
      kettleType = ValueMetaInterface.TYPE_BINARY;
      break;
    default:
      // UNKNOWN, URL, STRING and anything new fall back to string
      kettleType = ValueMetaInterface.TYPE_STRING;
      break;
  }
  return kettleType;
}
 
Example 2
Source File: RowForumulaContext.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Extract the native Java object for a Kettle value so it can be handed to
 * the formula engine.
 *
 * @param valueMeta metadata describing the value's type
 * @param valueData the raw row data for the value
 * @return the value as a native Java type (BigDecimal, byte[], Boolean, Date,
 *         Long, Double or String), or {@code null} for unsupported types
 *         (e.g. TYPE_SERIALIZABLE, which has no primitive accessor)
 * @throws KettleValueException if the data cannot be converted
 */
public static Object getPrimitive( ValueMetaInterface valueMeta, Object valueData ) throws KettleValueException {
  int type = valueMeta.getType();
  if ( type == ValueMetaInterface.TYPE_BIGNUMBER ) {
    return valueMeta.getBigNumber( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_BINARY ) {
    return valueMeta.getBinary( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_BOOLEAN ) {
    return valueMeta.getBoolean( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_DATE ) {
    return valueMeta.getDate( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_INTEGER ) {
    return valueMeta.getInteger( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_NUMBER ) {
    return valueMeta.getNumber( valueData );
  }
  if ( type == ValueMetaInterface.TYPE_STRING ) {
    return valueMeta.getString( valueData );
  }
  return null;
}
 
Example 3
Source File: CassandraColumnMetaData.java    From learning-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Return the Cassandra column type (internal cassandra class name relative to
 * org.apache.cassandra.db.marshal) for the given Kettle column.
 *
 * @param vm the ValueMetaInterface for the Kettle column
 * @return the corresponding internal cassandra type; any Kettle type without
 *         a specific mapping falls back to "UTF8Type"
 */
public static String getCassandraTypeForValueMeta(ValueMetaInterface vm) {
  switch (vm.getType()) {
  case ValueMetaInterface.TYPE_BIGNUMBER:
    return "DecimalType";
  case ValueMetaInterface.TYPE_BOOLEAN:
    return "BooleanType";
  case ValueMetaInterface.TYPE_INTEGER:
    return "LongType";
  case ValueMetaInterface.TYPE_NUMBER:
    return "DoubleType";
  case ValueMetaInterface.TYPE_DATE:
    return "DateType";
  case ValueMetaInterface.TYPE_BINARY:
  case ValueMetaInterface.TYPE_SERIALIZABLE:
    // both are raw bytes as far as Cassandra is concerned
    return "BytesType";
  case ValueMetaInterface.TYPE_STRING:
  default:
    return "UTF8Type";
  }
}
 
Example 4
Source File: RowForumulaContext.java    From pentaho-kettle with Apache License 2.0 6 votes vote down vote up
/**
 * Return the Java class that {@code getPrimitive} would produce for a given
 * Kettle value type constant.
 *
 * @param valueType one of the {@code ValueMetaInterface.TYPE_*} constants
 * @return the matching Java class, or {@code null} for unsupported types
 *         (e.g. TYPE_SERIALIZABLE, which has no primitive mapping)
 */
public static Class<?> getPrimitiveClass( int valueType ) {
  switch ( valueType ) {
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return BigDecimal.class;
    case ValueMetaInterface.TYPE_BINARY:
      // class literal instead of allocating a throwaway array just to call getClass()
      return byte[].class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return Boolean.class;
    case ValueMetaInterface.TYPE_DATE:
      return Date.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return Long.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return Double.class;
    case ValueMetaInterface.TYPE_STRING:
      return String.class;
    default:
      return null;
  }
}
 
Example 5
Source File: PhysicalTableImporter.java    From pentaho-metadata with GNU Lesser General Public License v2.1 5 votes vote down vote up
/**
 * Translate Kettle value metadata into metadata-model DataTypeSettings,
 * carrying over length and precision. Types with no explicit mapping keep
 * the default (string) data type.
 *
 * @param v the Kettle value metadata to translate
 * @return the corresponding DataTypeSettings
 */
private static DataTypeSettings getDataTypeSettings( ValueMetaInterface v ) {
  DataTypeSettings settings = new DataTypeSettings( DataTypeSettings.DATA_TYPE_STRING );
  int kettleType = v.getType();

  if ( kettleType == ValueMetaInterface.TYPE_BIGNUMBER
      || kettleType == ValueMetaInterface.TYPE_INTEGER
      || kettleType == ValueMetaInterface.TYPE_NUMBER ) {
    // all numeric Kettle types collapse onto a single model type
    settings.setType( DataTypeSettings.DATA_TYPE_NUMERIC );
  } else if ( kettleType == ValueMetaInterface.TYPE_BINARY ) {
    settings.setType( DataTypeSettings.DATA_TYPE_BINARY );
  } else if ( kettleType == ValueMetaInterface.TYPE_BOOLEAN ) {
    settings.setType( DataTypeSettings.DATA_TYPE_BOOLEAN );
  } else if ( kettleType == ValueMetaInterface.TYPE_DATE ) {
    settings.setType( DataTypeSettings.DATA_TYPE_DATE );
  } else if ( kettleType == ValueMetaInterface.TYPE_STRING ) {
    settings.setType( DataTypeSettings.DATA_TYPE_STRING );
  } else if ( kettleType == ValueMetaInterface.TYPE_NONE ) {
    settings.setType( DataTypeSettings.DATA_TYPE_UNKNOWN );
  }

  settings.setLength( v.getLength() );
  settings.setPrecision( v.getPrecision() );

  return settings;
}
 
Example 6
Source File: StreamingInputTest.java    From pentaho-hadoop-shims with Apache License 2.0 5 votes vote down vote up
/**
 * Build a concrete ValueMeta instance for the given field name and Kettle
 * type constant. Date and timestamp metas get a fixed conversion mask so
 * test data parses deterministically.
 *
 * @param fieldName the name to give the value meta
 * @param fieldType one of the {@code ValueMetaInterface.TYPE_*} constants
 * @return the matching value meta, or {@code null} for unsupported types
 */
private ValueMetaInterface getValueMetaInterface( String fieldName, int fieldType ) {
  switch ( fieldType ) {
    case ValueMetaInterface.TYPE_STRING:
      return new ValueMetaString( fieldName );
    case ValueMetaInterface.TYPE_INTEGER:
      return new ValueMetaInteger( fieldName );
    case ValueMetaInterface.TYPE_NUMBER:
      return new ValueMetaNumber( fieldName );
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return new ValueMetaBigNumber( fieldName );
    case ValueMetaInterface.TYPE_BOOLEAN:
      return new ValueMetaBoolean( fieldName );
    case ValueMetaInterface.TYPE_BINARY:
      return new ValueMetaBinary( fieldName );
    case ValueMetaInterface.TYPE_INET:
      return new ValueMetaInternetAddress( fieldName );
    case ValueMetaInterface.TYPE_TIMESTAMP: {
      ValueMetaTimestamp tsMeta = new ValueMetaTimestamp( fieldName );
      tsMeta.setConversionMask( "yyyy/MM/dd HH:mm:ss.SSS" );
      return tsMeta;
    }
    case ValueMetaInterface.TYPE_DATE: {
      ValueMetaDate dateMeta = new ValueMetaDate( fieldName );
      dateMeta.setConversionMask( "yyyy/MM/dd HH:mm:ss.SSS" );
      return dateMeta;
    }
    default:
      return null;
  }
}
 
Example 7
Source File: ParquetConverter.java    From pentaho-hadoop-shims with Apache License 2.0 5 votes vote down vote up
/**
 * Append a value meta of the requested PDI type to {@code fields}.
 * Unrecognized type constants are silently ignored, matching the original
 * switch with no default case.
 *
 * @param pdiType          one of the {@code ValueMetaInterface.TYPE_*} constants
 * @param pentahoFieldName the Pentaho field name for the new value meta
 */
private void addValueMeta( int pdiType, String pentahoFieldName ) {
  ValueMetaInterface valueMeta = null;
  switch ( pdiType ) {
    case ValueMetaInterface.TYPE_BINARY:
      valueMeta = new ValueMetaBinary( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_BIGNUMBER:
      valueMeta = new ValueMetaBigNumber( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      valueMeta = new ValueMetaBoolean( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_DATE:
      valueMeta = new ValueMetaDate( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_INET:
      valueMeta = new ValueMetaInternetAddress( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_INTEGER:
      valueMeta = new ValueMetaInteger( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_NUMBER:
      valueMeta = new ValueMetaNumber( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_STRING:
      valueMeta = new ValueMetaString( pentahoFieldName );
      break;
    case ValueMetaInterface.TYPE_TIMESTAMP:
      valueMeta = new ValueMetaTimestamp( pentahoFieldName );
      break;
    default:
      break;
  }
  if ( valueMeta != null ) {
    fields.addValueMeta( valueMeta );
  }
}
 
Example 8
Source File: RowMetaAndData.java    From pentaho-kettle with Apache License 2.0 5 votes vote down vote up
/**
 * Test whether the named column's value in this row is null, using the
 * type-specific accessor for the column's source data type.
 *
 * @param valueName the column name to look up
 * @return {@code true} when the converted value is null
 * @throws KettleValueException if the column does not exist or has an
 *                              unsupported source type
 */
public boolean isEmptyValue( String valueName ) throws KettleValueException {
  int index = rowMeta.indexOfValue( valueName );
  if ( index < 0 ) {
    throw new KettleValueException( "Unknown column '" + valueName + "'" );
  }

  ValueMetaInterface meta = rowMeta.getValueMeta( index );
  // dispatch on the source value type
  switch ( meta.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_INET:
      // internet addresses are compared via their string form
      return rowMeta.getString( data, index ) == null;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return rowMeta.getBoolean( data, index ) == null;
    case ValueMetaInterface.TYPE_INTEGER:
      return rowMeta.getInteger( data, index ) == null;
    case ValueMetaInterface.TYPE_NUMBER:
      return rowMeta.getNumber( data, index ) == null;
    case ValueMetaInterface.TYPE_BIGNUMBER:
      return rowMeta.getBigNumber( data, index ) == null;
    case ValueMetaInterface.TYPE_BINARY:
      return rowMeta.getBinary( data, index ) == null;
    case ValueMetaInterface.TYPE_DATE:
    case ValueMetaInterface.TYPE_TIMESTAMP:
      return rowMeta.getDate( data, index ) == null;
    default:
      throw new KettleValueException( "Unknown source type: " + meta.getTypeDesc() );
  }
}
 
Example 9
Source File: HypersonicDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Generate the Hypersonic (HSQLDB) DDL fragment for one field.
 *
 * @param v            the Kettle value metadata describing the field
 * @param tk           the technical key field name; matched case-insensitively
 *                     against the field name to emit an identity primary key
 * @param pk           the primary key field name; same treatment as {@code tk}
 * @param useAutoinc   not consulted in this implementation
 * @param addFieldName when true, prefix the definition with the field name
 * @param addCr        when true, append {@code Const.CR} to the result
 * @return the column definition fragment, e.g. {@code "name VARCHAR(35)"}
 */
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  StringBuilder retval = new StringBuilder( 128 );

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval.append( fieldname ).append( ' ' );
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      // dates and timestamps both map to TIMESTAMP
      retval.append( "TIMESTAMP" );
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      if ( supportsBooleanDataType() ) {
        retval.append( "BOOLEAN" );
      } else {
        // fall back to a one-character flag column
        retval.append( "CHAR(1)" );
      }
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
          fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        // key columns become auto-incrementing identity primary keys
        retval.append( "BIGINT GENERATED BY DEFAULT AS IDENTITY(START WITH 0, INCREMENT BY 1) PRIMARY KEY" );
      } else {
        if ( length > 0 ) {
          if ( precision > 0 || length > 18 ) {
            // fractional or very wide numbers need explicit NUMERIC(l, p)
            retval.append( "NUMERIC(" ).append( length ).append( ", " ).append( precision ).append( ')' );
          } else {
            // pick the smallest integer type that can hold `length` digits
            if ( length > 9 ) {
              retval.append( "BIGINT" );
            } else {
              if ( length < 5 ) {
                retval.append( "SMALLINT" );
              } else {
                retval.append( "INTEGER" );
              }
            }
          }

        } else {
          // no length specified: use floating point
          retval.append( "DOUBLE PRECISION" );
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length >= DatabaseMeta.CLOB_LENGTH ) {
        retval.append( "LONGVARCHAR" );
      } else {
        retval.append( "VARCHAR" );
        if ( length > 0 ) {
          retval.append( '(' ).append( length );
        } else {
          // NOTE(review): length <= 0 emits "VARCHAR()", which looks like
          // invalid DDL — confirm whether a default length was intended
          retval.append( '(' ); // Maybe use some default DB String length?
        }
        retval.append( ')' );
      }
      break;
    default:
      retval.append( " UNKNOWN" );
      break;
  }

  if ( addCr ) {
    retval.append( Const.CR );
  }

  return retval.toString();
}
 
Example 10
Source File: BeamBQOutputTransform.java    From kettle-beam with Apache License 2.0 4 votes vote down vote up
/**
 * Terminal transform: writes the incoming KettleRow collection to the
 * configured BigQuery table, deriving the BigQuery schema from the Kettle
 * row metadata.
 *
 * @param input the collection of rows to write
 * @return {@code PDone}, since this ends the pipeline
 * @throws RuntimeException wrapping any setup or schema-mapping failure
 */
@Override public PDone expand( PCollection<KettleRow> input ) {

    try {
      // Only initialize once on this node/vm
      //
      BeamKettle.init( stepPluginClasses, xpPluginClasses );

      // Inflate the metadata on the node where this is running...
      //
      RowMetaInterface rowMeta = JsonRowMeta.fromJson( rowMetaJson );


      // Which table do we write to?
      //
      TableReference tableReference = new TableReference();
      if ( StringUtils.isNotEmpty( projectId ) ) {
        tableReference.setProjectId( projectId );
      }
      tableReference.setDatasetId( datasetId );
      tableReference.setTableId( tableId );

      // Map every Kettle field onto a BigQuery schema field.
      //
      TableSchema tableSchema = new TableSchema();
      List<TableFieldSchema> schemaFields = new ArrayList<>();
      for ( ValueMetaInterface valueMeta : rowMeta.getValueMetaList() ) {
        TableFieldSchema schemaField = new TableFieldSchema();
        schemaField.setName( valueMeta.getName() );
        switch(valueMeta.getType()){
          case ValueMetaInterface.TYPE_STRING: schemaField.setType( "STRING" ); break;
          case ValueMetaInterface.TYPE_INTEGER: schemaField.setType( "INTEGER" ); break;
          case ValueMetaInterface.TYPE_DATE: schemaField.setType( "DATETIME" ); break;
          case ValueMetaInterface.TYPE_BOOLEAN: schemaField.setType( "BOOLEAN" ); break;
          case ValueMetaInterface.TYPE_NUMBER: schemaField.setType( "FLOAT" ); break;
          default:
            throw new RuntimeException( "Conversion from Kettle value "+valueMeta.toString()+" to BigQuery TableRow isn't supported yet" );
        }
        schemaFields.add(schemaField);
      }
      tableSchema.setFields( schemaFields );

      SerializableFunction<KettleRow, TableRow> formatFunction = new KettleToBQTableRowFn( stepname, rowMetaJson, stepPluginClasses, xpPluginClasses );

      BigQueryIO.Write.CreateDisposition createDisposition;
      if (createIfNeeded) {
        createDisposition = BigQueryIO.Write.CreateDisposition.CREATE_IF_NEEDED;
      }  else {
        createDisposition = BigQueryIO.Write.CreateDisposition.CREATE_NEVER;
      }

      BigQueryIO.Write.WriteDisposition writeDisposition;
      if (truncateTable) {
        // FIX: truncating the table requires WRITE_TRUNCATE; the previous
        // WRITE_APPEND meant the truncate flag silently had no effect.
        writeDisposition = BigQueryIO.Write.WriteDisposition.WRITE_TRUNCATE;
      } else {
        if (failIfNotEmpty) {
          writeDisposition = BigQueryIO.Write.WriteDisposition.WRITE_EMPTY;
        } else {
          writeDisposition = BigQueryIO.Write.WriteDisposition.WRITE_APPEND;
        }
      }

      BigQueryIO.Write<KettleRow> bigQueryWrite = BigQueryIO
        .<KettleRow>write()
        .to( tableReference )
        .withSchema( tableSchema )
        .withCreateDisposition( createDisposition )
        .withWriteDisposition( writeDisposition )
        .withFormatFunction( formatFunction );

      // TODO: pass the results along the way at some point
      //
      input.apply( stepname, bigQueryWrite );

      // End of the line
      //
      return PDone.in( input.getPipeline() );

    } catch ( Exception e ) {
      numErrors.inc();
      LOG.error( "Error in Beam BigQuery output transform", e );
      throw new RuntimeException( "Error in Beam BigQuery output transform", e );
    }
  }
 
Example 11
Source File: AS400DatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    retval += fieldname + " ";
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "TIMESTAMP";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      retval += "CHAR(1)";
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( length <= 0 && precision <= 0 ) {
        retval += "DOUBLE";
      } else {
        retval += "DECIMAL";
        if ( length > 0 ) {
          retval += "(" + length;
          if ( precision > 0 ) {
            retval += ", " + precision;
          }
          retval += ")";
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length > getMaxVARCHARLength() || length >= DatabaseMeta.CLOB_LENGTH ) {
        retval += "CLOB";
      } else {
        retval += "VARCHAR";
        if ( length > 0 ) {
          retval += "(" + length;
        } else {
          retval += "("; // Maybe use some default DB String length?
        }
        retval += ")";
      }
      break;
    default:
      retval += " UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
 
Example 12
Source File: PentahoAvroInputFormat.java    From pentaho-hadoop-shims with Apache License 2.0 4 votes vote down vote up
/**
 * Derive the default list of input fields from the Avro schema.
 *
 * <p>Each supported Avro type is mapped to the corresponding Kettle
 * (Pentaho) value type; unsupported fields are skipped. When the Avro field
 * name carries a Pentaho-8-style encoded type, that type overrides the
 * mapping.</p>
 *
 * @return the input field descriptions derived from the schema
 * @throws Exception if the Avro schema cannot be read
 */
public List<? extends IAvroInputField> getDefaultFields() throws Exception {
  ArrayList<AvroInputField> fields = new ArrayList<>();

  Schema avroSchema = readAvroSchema();
  for ( Schema.Field f : avroSchema.getFields() ) {
    AvroSpec.DataType actualAvroType = findActualDataType( f );
    AvroSpec.DataType supportedAvroType = null;
    if ( actualAvroType != null && isSupported( actualAvroType ) ) {
      supportedAvroType = actualAvroType;
    }

    if ( supportedAvroType == null ) {
      // Todo: log a message about skipping unsupported fields
      continue;
    }

    int pentahoType = 0;
    switch ( supportedAvroType ) {
      case DATE:
        pentahoType = ValueMetaInterface.TYPE_DATE;
        break;
      case DOUBLE:
      case FLOAT:
        // both floating-point Avro types map to a Kettle number
        pentahoType = ValueMetaInterface.TYPE_NUMBER;
        break;
      case LONG:
      case INTEGER:
        // both integral Avro types map to a Kettle integer
        pentahoType = ValueMetaInterface.TYPE_INTEGER;
        break;
      case BOOLEAN:
        pentahoType = ValueMetaInterface.TYPE_BOOLEAN;
        break;
      case STRING:
        pentahoType = ValueMetaInterface.TYPE_STRING;
        break;
      case BYTES:
        pentahoType = ValueMetaInterface.TYPE_BINARY;
        break;
      case DECIMAL:
        pentahoType = ValueMetaInterface.TYPE_BIGNUMBER;
        break;
      case TIMESTAMP_MILLIS:
        pentahoType = ValueMetaInterface.TYPE_TIMESTAMP;
        break;
    }

    // If this is a Pentaho 8 Avro field name, use the ValueMetaInterface type encoded in the Avro field name instead
    FieldName fieldName = parseFieldName( f.name() );
    if ( fieldName != null ) {
      pentahoType = fieldName.type;
    }

    AvroInputField avroInputField = new AvroInputField();
    // FIX: setFormatFieldName was called twice with the same argument;
    // the redundant second call has been removed.
    avroInputField.setFormatFieldName( f.name() );
    avroInputField.setPentahoFieldName( avroInputField.getDisplayableAvroFieldName() );
    avroInputField.setPentahoType( pentahoType );
    avroInputField.setAvroType( actualAvroType );
    fields.add( avroInputField );
  }

  return fields;
}
 
Example 13
Source File: LDAPConnection.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Scan the LDAP subtree under the given search base and build a RowMeta
 * describing every distinct attribute found.
 *
 * <p>The data type of each field is guessed from the first value seen for
 * that attribute: date, then integer, then number, with string as the
 * fallback.</p>
 *
 * @param searchBase the LDAP base DN to search under
 * @return row metadata with one value meta per distinct attribute
 * @throws KettleException if the search or attribute retrieval fails
 */
public RowMeta getFields( String searchBase ) throws KettleException {
  RowMeta fields = new RowMeta();
  // tracks attribute IDs already added so each field appears only once
  List<String> seenFields = new ArrayList<String>();
  try {
    search( searchBase, null, 0, null, SEARCH_SCOPE_SUBTREE_SCOPE );
    Attributes attributes = null;
    while ( ( attributes = getAttributes() ) != null ) {

      NamingEnumeration<? extends Attribute> ne = attributes.getAll();

      while ( ne.hasMore() ) {
        Attribute attr = ne.next();
        String fieldName = attr.getID();
        if ( !seenFields.contains( fieldName ) ) {
          seenFields.add( fieldName );

          String attributeValue = attr.get().toString();
          int valueType;

          // Try to determine the data type
          //
          if ( IsDate( attributeValue ) ) {
            valueType = ValueMetaInterface.TYPE_DATE;
          } else if ( IsInteger( attributeValue ) ) {
            valueType = ValueMetaInterface.TYPE_INTEGER;
          } else if ( IsNumber( attributeValue ) ) {
            valueType = ValueMetaInterface.TYPE_NUMBER;
          } else {
            valueType = ValueMetaInterface.TYPE_STRING;
          }

          ValueMetaInterface value = ValueMetaFactory.createValueMeta( fieldName, valueType );
          fields.addValueMeta( value );
        }
      }
    }
    return fields;
  } catch ( Exception e ) {
    // FIX: preserve the original exception as the cause instead of
    // discarding it, so the real failure is visible in the stack trace.
    throw new KettleException( BaseMessages.getString( PKG, "LDAPConnection.Error.RetrievingFields" ), e );
  }
}
 
Example 14
Source File: FirebirdDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  String retval = "";

  String fieldname = v.getName();
  int length = v.getLength();
  int precision = v.getPrecision();

  if ( addFieldName ) {
    if ( Const.indexOfString( fieldname, getReservedWords() ) >= 0 ) {
      retval += getStartQuote() + fieldname + getEndQuote();
    } else {
      retval += fieldname + " ";
    }
  }

  int type = v.getType();
  switch ( type ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      retval += "TIMESTAMP";
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      if ( supportsBooleanDataType() ) {
        retval += "BIT";
      } else {
        retval += "CHAR(1)";
      }
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( fieldname.equalsIgnoreCase( tk ) || // Technical key
        fieldname.equalsIgnoreCase( pk ) // Primary key
      ) {
        retval += "BIGINT NOT NULL PRIMARY KEY";
      } else {
        if ( length > 0 ) {
          if ( precision > 0 || length > 18 ) {
            retval += "DECIMAL(" + length;
            if ( precision > 0 ) {
              retval += ", " + precision;
            }
            retval += ")";
          } else {
            if ( length > 9 ) {
              retval += "BIGINT";
            } else {
              if ( length < 5 ) {
                retval += "SMALLINT";
              } else {
                retval += "INTEGER";
              }
            }
          }
        } else {
          retval += "DOUBLE";
        }
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      if ( length < 32720 ) {
        retval += "VARCHAR";
        if ( length > 0 ) {
          retval += "(" + length + ")";
        } else {
          retval += "(8000)"; // Maybe use some default DB String length?
        }
      } else {
        retval += "BLOB SUB_TYPE TEXT";
      }
      break;
    case ValueMetaInterface.TYPE_BINARY:
      retval += "BLOB";
      break;
    default:
      retval += "UNKNOWN";
      break;
  }

  if ( addCr ) {
    retval += Const.CR;
  }

  return retval;
}
 
Example 15
Source File: TransHistoryDelegate.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Build the read-only table view that lists transformation log-table rows.
 * One column is created per non-log-text field, with an alignment and
 * conversion mask chosen from the field's data type. Selecting a row shows
 * the corresponding log entry.
 *
 * @param parent the SWT composite to create the table view in
 * @return the configured, read-only table view
 */
private TableView createTransLogTableView( Composite parent ) {
  List<ColumnInfo> columns = new ArrayList<ColumnInfo>();

  for ( LogTableField field : logTableFields ) {
    if ( field.isLogField() ) {
      // the log text field is not shown as a column
      continue;
    }

    ColumnInfo column = new ColumnInfo( field.getName(), ColumnInfo.COLUMN_TYPE_TEXT, false, true );
    int valueType = field.getDataType();
    String conversionMask = null;

    switch ( field.getDataType() ) {
      case ValueMetaInterface.TYPE_INTEGER:
        conversionMask = "###,###,##0";
        column.setAllignement( SWT.RIGHT );
        break;
      case ValueMetaInterface.TYPE_DATE:
        conversionMask = "yyyy/MM/dd HH:mm:ss";
        column.setAllignement( SWT.CENTER );
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        conversionMask = " ###,###,##0.00;-###,###,##0.00";
        column.setAllignement( SWT.RIGHT );
        break;
      case ValueMetaInterface.TYPE_STRING:
        column.setAllignement( SWT.LEFT );
        break;
      case ValueMetaInterface.TYPE_BOOLEAN:
        DatabaseMeta databaseMeta = logTable.getDatabaseMeta();
        if ( databaseMeta != null && !databaseMeta.supportsBooleanDataType() ) {
          // Boolean gets converted to String!
          valueType = ValueMetaInterface.TYPE_STRING;
        }
        break;
      default:
        break;
    }

    ValueMetaInterface valueMeta = new ValueMeta( field.getFieldName(), valueType, field.getLength(), -1 );
    if ( conversionMask != null ) {
      valueMeta.setConversionMask( conversionMask );
    }
    column.setValueMeta( valueMeta );
    columns.add( column );
  }

  TableView tableView = new TableView( transMeta, parent, SWT.BORDER | SWT.FULL_SELECTION | SWT.SINGLE,
    columns.toArray( new ColumnInfo[columns.size()] ), 1,
    true, // readonly!
    null, spoon.props );

  tableView.table.addSelectionListener( new SelectionAdapter() {
    @Override
    public void widgetSelected( SelectionEvent event ) {
      showLogEntry();
    }
  } );

  return tableView;
}
 
Example 16
Source File: EnterValueDialog.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Refresh the format-mask combo to match the currently selected value type.
 * Integer, number and date types each get their standard mask list plus a
 * type-specific default; an unknown mask the user typed earlier is re-added
 * and kept selected. Other types get an empty mask list.
 */
protected void setFormats() {
  // The combo index must be read before setItems() replaces the list,
  // otherwise the previous selection would be lost (selectedIndex becomes -1).
  int formatIndex = wFormat.getSelectionIndex();
  String formatString = formatIndex >= 0 ? wFormat.getItem( formatIndex ) : "";
  int type = ValueMetaFactory.getIdForValueMeta( wValueType.getText() );
  String string = wInputString.getText();

  // Non-string values should not keep leading/trailing whitespace.
  if ( ( type != ValueMetaInterface.TYPE_STRING ) && ( string.startsWith( " " ) || string.endsWith( " " ) ) ) {
    string = Const.trim( string );
    wInputString.setText( string );
  }

  String defaultMask = null;
  switch ( type ) {
    case ValueMetaInterface.TYPE_INTEGER:
      wFormat.setItems( Const.getNumberFormats() );
      defaultMask = "#";
      break;
    case ValueMetaInterface.TYPE_NUMBER:
      wFormat.setItems( Const.getNumberFormats() );
      defaultMask = "#.#";
      break;
    case ValueMetaInterface.TYPE_DATE:
      wFormat.setItems( Const.getDateFormats() );
      defaultMask = "yyyy/MM/dd HH:mm:ss";
      break;
    default:
      // TYPE_BIGNUMBER and everything else: no format masks, no selection
      wFormat.setItems( new String[] {} );
      break;
  }

  if ( defaultMask != null ) {
    int index = ( !Utils.isEmpty( formatString ) ) ? wFormat.indexOf( formatString ) : wFormat.indexOf( defaultMask );
    // An unknown mask means the user typed a custom one: add and select it.
    if ( ( !Utils.isEmpty( formatString ) ) && ( index < 0 ) ) {
      wFormat.add( formatString );
      index = wFormat.indexOf( formatString );
    }
    wFormat.select( index ); // default
  }
}
 
Example 17
Source File: ValueMetaBase.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Get a value from a result set column based on the current value metadata
 *
 * @param databaseInterface
 *          the database metadata to use
 * @param resultSet
 *          The JDBC result set to read from
 * @param index
 *          The column index (0-based here; 1 is added for the 1-based JDBC API)
 * @return The Kettle native data type based on the value metadata, or
 *         {@code null} when the column was SQL NULL or the type is unhandled
 * @throws KettleDatabaseException
 *           in case something goes wrong.
 */
@Override
public Object getValueFromResultSet( DatabaseInterface databaseInterface, ResultSet resultSet, int index )
  throws KettleDatabaseException {
  try {
    Object data = null;

    switch ( getType() ) {
      case ValueMetaInterface.TYPE_BOOLEAN:
        data = Boolean.valueOf( resultSet.getBoolean( index + 1 ) );
        break;
      case ValueMetaInterface.TYPE_NUMBER:
        // FIX: the deprecated boxing constructor `new Double(...)` is
        // replaced with the cached-value factory Double.valueOf(...).
        data = Double.valueOf( resultSet.getDouble( index + 1 ) );
        break;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        data = resultSet.getBigDecimal( index + 1 );
        break;
      case ValueMetaInterface.TYPE_INTEGER:
        data = Long.valueOf( resultSet.getLong( index + 1 ) );
        break;
      case ValueMetaInterface.TYPE_STRING:
        if ( isStorageBinaryString() ) {
          // binary-string storage keeps the raw bytes instead of decoding
          data = resultSet.getBytes( index + 1 );
        } else {
          data = resultSet.getString( index + 1 );
        }
        break;
      case ValueMetaInterface.TYPE_BINARY:
        if ( databaseInterface.supportsGetBlob() ) {
          Blob blob = resultSet.getBlob( index + 1 );
          if ( blob != null ) {
            data = blob.getBytes( 1L, (int) blob.length() );
          } else {
            data = null;
          }
        } else {
          data = resultSet.getBytes( index + 1 );
        }
        break;

      case ValueMetaInterface.TYPE_DATE:
        if ( getPrecision() != 1 && databaseInterface.supportsTimeStampToDateConversion() ) {
          data = resultSet.getTimestamp( index + 1 );
          break; // Timestamp extends java.util.Date
        } else if ( databaseInterface instanceof NetezzaDatabaseMeta ) {
          // PDI-10877 workaround for IBM netezza jdbc 'special' implementation
          data = getNetezzaDateValueWorkaround( databaseInterface, resultSet, index + 1 );
          break;
        } else {
          data = resultSet.getDate( index + 1 );
          break;
        }
      default:
        break;
    }
    // JDBC reports SQL NULL via wasNull() after the getter has been called
    if ( resultSet.wasNull() ) {
      data = null;
    }
    return data;
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Unable to get value '" + toStringMeta() + "' from database resultset, index "
        + index, e );
  }

}
 
Example 18
Source File: TestCWM.java    From pentaho-metadata with GNU Lesser General Public License v2.1 4 votes vote down vote up
public void storeTable() {
  // Report whether the test table already exists; either way it is (re)created below.
  if ( cwm.getTable( TEST_TABLE_NAME ) == null ) {
    System.out.println( "Table [" + TEST_TABLE_NAME + "] not found: creating..." ); //$NON-NLS-1$ //$NON-NLS-2$
  } else {
    System.out.println( "Table [" + TEST_TABLE_NAME + "] found: overwriting..." ); //$NON-NLS-1$ //$NON-NLS-2$
  }
  cwm.beginTransaction();

  // Assemble the row layout: one value meta per column, each tagged with a
  // description via its origin.
  RowMetaInterface rowMeta = new RowMeta();

  ValueMetaInterface stringField = new ValueMeta( "field1", ValueMetaInterface.TYPE_STRING ); //$NON-NLS-1$
  stringField.setLength( 35 );
  stringField.setOrigin( "field1 description" ); //$NON-NLS-1$
  rowMeta.addValueMeta( stringField );

  ValueMetaInterface numberField = new ValueMeta( "field2", ValueMetaInterface.TYPE_NUMBER ); //$NON-NLS-1$
  numberField.setLength( 7, 2 );
  numberField.setOrigin( "field2 description" ); //$NON-NLS-1$
  rowMeta.addValueMeta( numberField );

  ValueMetaInterface integerField = new ValueMeta( "field3", ValueMetaInterface.TYPE_INTEGER );
  integerField.setLength( 5 );
  integerField.setOrigin( "field3 description" );
  rowMeta.addValueMeta( integerField );

  ValueMetaInterface dateField = new ValueMeta( "field4", ValueMetaInterface.TYPE_DATE );
  dateField.setOrigin( "field4 description" );
  rowMeta.addValueMeta( dateField );

  ValueMetaInterface bigNumberField = new ValueMeta( "field5", ValueMetaInterface.TYPE_BIGNUMBER );
  bigNumberField.setLength( 52, 16 );
  bigNumberField.setOrigin( "field5 description" );
  rowMeta.addValueMeta( bigNumberField );

  ValueMetaInterface booleanField = new ValueMeta( "field6", ValueMetaInterface.TYPE_BOOLEAN );
  booleanField.setOrigin( "field6 description" );
  rowMeta.addValueMeta( booleanField );

  CwmTable createdTable = cwm.createTable( TEST_TABLE_NAME, rowMeta );

  // Attach a description to the table itself, then one per column
  // (column descriptions come from the value metas' origin text).
  cwm.setDescription( createdTable, cwm.createDescription( "This is a table description" ) ); //$NON-NLS-1$
  @SuppressWarnings( "unchecked" )
  Collection<CwmColumn> ownedElements = createdTable.getOwnedElement();
  CwmColumn[] columns = (CwmColumn[]) ownedElements.toArray( new CwmColumn[ownedElements.size()] );

  for ( int col = 0; col < rowMeta.size(); col++ ) {
    cwm.setDescription( columns[col], cwm.createDescription( rowMeta.getValueMeta( col ).getOrigin() ) );
  }

  // Exercise package creation: import the new table into a domain package
  // and describe the package too.
  CwmPackage cwmPackage = cwm.createPackage( DOMAIN + " package" ); //$NON-NLS-1$
  @SuppressWarnings( "unchecked" )
  Collection<CwmTable> importedElements = cwmPackage.getImportedElement();
  importedElements.add( createdTable );
  cwm.setDescription( cwmPackage, cwm.createDescription( "This is a package description for [" + DOMAIN + "]" ) ); //$NON-NLS-1$ //$NON-NLS-2$

  cwm.endTransaction();

  System.out.println( "Finished writing to table [" + TEST_TABLE_NAME + "]." ); //$NON-NLS-1$ //$NON-NLS-2$
}
 
Example 19
Source File: PostgreSQLDatabaseMeta.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
@Override
public String getFieldDefinition( ValueMetaInterface v, String tk, String pk, boolean useAutoinc,
                                  boolean addFieldName, boolean addCr ) {
  // Builds the PostgreSQL column DDL fragment for one field, optionally
  // prefixed with the field name and suffixed with a carriage return.
  String name = v.getName();

  // CLOB-marked lengths are normalized to the maximum TEXT field length
  // before the length/precision are read.
  if ( v.getLength() == DatabaseMeta.CLOB_LENGTH ) {
    v.setLength( getMaxTextFieldLength() );
  }
  int length = v.getLength();
  int precision = v.getPrecision();

  StringBuilder ddl = new StringBuilder();
  if ( addFieldName ) {
    ddl.append( name ).append( ' ' );
  }

  switch ( v.getType() ) {
    case ValueMetaInterface.TYPE_TIMESTAMP:
    case ValueMetaInterface.TYPE_DATE:
      // Dates and timestamps both map to TIMESTAMP.
      ddl.append( "TIMESTAMP" );
      break;
    case ValueMetaInterface.TYPE_BOOLEAN:
      ddl.append( supportsBooleanDataType() ? "BOOLEAN" : "CHAR(1)" );
      break;
    case ValueMetaInterface.TYPE_NUMBER:
    case ValueMetaInterface.TYPE_INTEGER:
    case ValueMetaInterface.TYPE_BIGNUMBER:
      if ( name.equalsIgnoreCase( tk ) || name.equalsIgnoreCase( pk ) ) {
        // Technical or primary keys become auto-incrementing 64-bit serials.
        ddl.append( "BIGSERIAL" );
      } else if ( length <= 0 ) {
        // No length specified: fall back to a floating-point column.
        ddl.append( "DOUBLE PRECISION" );
      } else if ( precision > 0 || length > 18 ) {
        // Numeric(Precision, Scale): Precision = total digits (integer length
        // plus decimal places); Scale = decimal places.
        ddl.append( "NUMERIC(" ).append( length + precision ).append( ", " ).append( precision ).append( ")" );
      } else if ( length > 9 ) {
        ddl.append( "BIGINT" );
      } else if ( length < 5 ) {
        ddl.append( "SMALLINT" );
      } else {
        ddl.append( "INTEGER" );
      }
      break;
    case ValueMetaInterface.TYPE_STRING:
      // Unbounded or CLOB-sized strings become TEXT; anything else VARCHAR(n).
      if ( length < 1 || length >= DatabaseMeta.CLOB_LENGTH ) {
        ddl.append( "TEXT" );
      } else {
        ddl.append( "VARCHAR(" ).append( length ).append( ")" );
      }
      break;
    default:
      // NOTE(review): the leading space before UNKNOWN is preserved as-is.
      ddl.append( " UNKNOWN" );
      break;
  }

  if ( addCr ) {
    ddl.append( Const.CR );
  }

  return ddl.toString();
}
 
Example 20
Source File: ValueMetaConverter.java    From pentaho-kettle with Apache License 2.0 4 votes vote down vote up
/**
 * Converts a Kettle Integer value (a {@link Long}) to the requested target
 * value-meta type.
 *
 * @param targetValueMetaType one of the {@code ValueMetaInterface.TYPE_*} constants
 * @param value               the source value; must be {@code null} or a {@code Long}
 * @return the converted value, or {@code null} when {@code value} is {@code null}
 * @throws ValueMetaConversionException when the value has the wrong runtime type,
 *         the target type is not supported, or the conversion itself fails
 */
protected Object convertFromIntegerMetaInterface( int targetValueMetaType, Object value )
  throws ValueMetaConversionException {

  // null converts to null regardless of the target type.
  if ( value == null ) {
    return value;
  }

  if ( !( value instanceof Long ) ) {
    handleConversionError(
      "Error.  Expecting value of type Long.    actual value type = '" + value.getClass() + "'.    value = '" + value
        + "'." );
  }

  try {
    switch ( targetValueMetaType ) {
      case ValueMetaInterface.TYPE_STRING:
        return Long.toString( (Long) value );
      case ValueMetaInterface.TYPE_INTEGER:
        // Long is immutable, so the same instance can be returned directly
        // (avoids the deprecated new Long(...) boxing constructor).
        return value;
      case ValueMetaInterface.TYPE_NUMBER:
        Double doubleValue = ( (Long) value ).doubleValue();
        if ( getPrecision() > 0 ) {
          // Round to the configured number of decimal places.
          BigDecimal bigDecimal = new BigDecimal( doubleValue );
          bigDecimal = bigDecimal.setScale( getPrecision(), RoundingMode.HALF_UP );
          doubleValue = bigDecimal.doubleValue();
        }
        return doubleValue;
      case ValueMetaInterface.TYPE_BIGNUMBER:
        // BigDecimal.valueOf(long) preserves all 64 bits of the source value.
        // The previous route through doubleValue() silently corrupted longs
        // with magnitude beyond 2^53 (double's integer precision limit).
        return BigDecimal.valueOf( (Long) value );
      case ValueMetaInterface.TYPE_DATE:
        // The long is interpreted as epoch milliseconds.
        return new Date( (long) value );
      case ValueMetaInterface.TYPE_TIMESTAMP:
        return new Timestamp( (long) value );
      default:
        throwBadConversionCombination( ValueMetaInterface.TYPE_INTEGER, targetValueMetaType, value );
    }
  } catch ( Exception e ) {
    throwErroredConversion( ValueMetaInterface.TYPE_INTEGER, targetValueMetaType, value, e );
  }
  return value;
}