Java Code Examples for org.apache.pig.data.DataType#BOOLEAN

The following examples show how to use org.apache.pig.data.DataType#BOOLEAN. They are drawn from open source projects; each example notes the project and source file it comes from.
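Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; the class name BooleanTypeSketch is invented for illustration) showing the basic role of the constant: DataType.BOOLEAN is the byte tag Pig assigns to boolean values.

import org.apache.pig.data.DataType;

public class BooleanTypeSketch {
    public static void main(String[] args) {
        // findType maps a Java object to its Pig type byte.
        byte type = DataType.findType(Boolean.TRUE);
        System.out.println(type == DataType.BOOLEAN);     // expected: true
        // findTypeName turns the byte tag back into a readable name.
        System.out.println(DataType.findTypeName(type));  // expected: boolean
    }
}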
Example 1
Source File: SchemaTupleClassGenerator.java    From spork with Apache License 2.0
@Override
public void process(int fieldPos, Schema.FieldSchema fs) {
    add("public "+typeName()+" getDummy_"+fieldPos+"() {");
    switch (fs.type) {
    case (DataType.INTEGER): add("    return 0;"); break;
    case (DataType.LONG): add("    return 0L;"); break;
    case (DataType.FLOAT): add("    return 0.0f;"); break;
    case (DataType.DOUBLE): add("    return 0.0;"); break;
    case (DataType.BOOLEAN): add("    return true;"); break;
    case (DataType.DATETIME): add("    return new DateTime();"); break;
    case (DataType.BIGDECIMAL): add("    return (BigDecimal)null;"); break;
    case (DataType.BIGINTEGER): add("    return (BigInteger)null;"); break;
    case (DataType.BYTEARRAY): add("    return (byte[])null;"); break;
    case (DataType.CHARARRAY): add("    return (String)null;"); break;
    case (DataType.TUPLE): add("    return (Tuple)null;"); break;
    case (DataType.BAG): add("    return (DataBag)null;"); break;
    case (DataType.MAP): add("    return (Map<String,Object>)null;"); break;
    default: throw new RuntimeException("Unsupported type");
    }
    add("}");
    addBreak();
}
 
Example 2
Source File: HiveRCSchemaUtil.java    From spork with Apache License 2.0
/**
 * Returns the pig DataType for the hive type
 *
 * @param hiveType
 * @return byte from DataType
 */
public static byte findPigDataType(String hiveType) {
    hiveType = hiveType.toLowerCase();

    if (hiveType.equals("string"))
        return DataType.CHARARRAY;
    else if (hiveType.equals("int"))
        return DataType.INTEGER;
    else if (hiveType.equals("bigint") || hiveType.equals("long"))
        return DataType.LONG;
    else if (hiveType.equals("float"))
        return DataType.FLOAT;
    else if (hiveType.equals("double"))
        return DataType.DOUBLE;
    else if (hiveType.equals("boolean"))
        return DataType.BOOLEAN;
    else if (hiveType.equals("byte"))
        return DataType.INTEGER;
    else if (hiveType.contains("array"))
        return DataType.TUPLE;
    else if (hiveType.contains("map"))
        return DataType.MAP;
    else
        return DataType.ERROR;
}
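A hypothetical call site for the method above; the piggybank package in the import is an assumption about where HiveRCSchemaUtil lives, so adjust it to your build.

import org.apache.pig.data.DataType;
// Assumed package for the class shown in Example 2.
import org.apache.pig.piggybank.storage.hiverc.HiveRCSchemaUtil;

public class HiveTypeMappingSketch {
    public static void main(String[] args) {
        byte t = HiveRCSchemaUtil.findPigDataType("boolean");
        System.out.println(t == DataType.BOOLEAN);  // expected: true
    }
}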
 
Example 3
Source File: PigSchema2Avro.java    From spork with Apache License 2.0
/**
 * Convert Pig primitive type to Avro type
 * 
 */
protected static Schema convertPrimitiveType(byte pigType) throws IOException {

    if (pigType == DataType.BOOLEAN) {
        return AvroStorageUtils.BooleanSchema;
    } else if (pigType == DataType.BYTEARRAY) {
        return AvroStorageUtils.BytesSchema;
    } else if (pigType == DataType.CHARARRAY
                                    || pigType == DataType.BIGCHARARRAY) {
        return AvroStorageUtils.StringSchema;
    } else if (pigType == DataType.DOUBLE) {
        return AvroStorageUtils.DoubleSchema;
    } else if (pigType == DataType.FLOAT) {
        return AvroStorageUtils.FloatSchema;
    } else if (pigType == DataType.INTEGER) {
        return AvroStorageUtils.IntSchema;
    } else if (pigType == DataType.LONG) {
        return AvroStorageUtils.LongSchema;
    } else
        throw new IOException("unsupported pig type:"
                                        + DataType.findTypeName(pigType));

}
 
Example 4
Source File: PigSchema2Avro.java    From Cubert with Apache License 2.0
/**
 * Convert Pig primitive type to Avro type
 * 
 */
protected static Schema convertPrimitiveType(byte pigType) throws IOException {

    if (pigType == DataType.BOOLEAN) {
        return AvroStorageUtils.BooleanSchema;
    } else if (pigType == DataType.BYTEARRAY) {
        return AvroStorageUtils.BytesSchema;
    } else if (pigType == DataType.CHARARRAY
                                    || pigType == DataType.BIGCHARARRAY) {
        return AvroStorageUtils.StringSchema;
    } else if (pigType == DataType.DOUBLE) {
        return AvroStorageUtils.DoubleSchema;
    } else if (pigType == DataType.FLOAT) {
        return AvroStorageUtils.FloatSchema;
    } else if (pigType == DataType.INTEGER) {
        return AvroStorageUtils.IntSchema;
    } else if (pigType == DataType.LONG) {
        return AvroStorageUtils.LongSchema;
    } else
        throw new IOException("unsupported pig type:"
                                        + DataType.findTypeName(pigType));

}
 
Example 5
Source File: IsNullExpression.java    From spork with Apache License 2.0
@Override
public LogicalSchema.LogicalFieldSchema getFieldSchema() throws FrontendException {
    if (fieldSchema!=null)
        return fieldSchema;
    fieldSchema = new LogicalSchema.LogicalFieldSchema(null, null, DataType.BOOLEAN);
    uidOnlyFieldSchema = fieldSchema.mergeUid(uidOnlyFieldSchema);
    return fieldSchema;
}
 
Example 6
Source File: AugmentBaseDataVisitor.java    From spork with Apache License 2.0
Object generateData(byte type, String data) {
    switch (type) {
    case DataType.BOOLEAN:
        if (data.equalsIgnoreCase("true")) {
            return Boolean.TRUE;
        } else if (data.equalsIgnoreCase("false")) {
            return Boolean.FALSE;
        } else {
            return null;
        }
    case DataType.BYTEARRAY:
        return new DataByteArray(data.getBytes());
    case DataType.DOUBLE:
        return Double.valueOf(data);
    case DataType.FLOAT:
        return Float.valueOf(data);
    case DataType.INTEGER:
        return Integer.valueOf(data);
    case DataType.LONG:
        return Long.valueOf(data);
    case DataType.BIGINTEGER:
        return new BigInteger(data);
    case DataType.BIGDECIMAL:
        return new BigDecimal(data);
    case DataType.DATETIME:
        return new DateTime(data);
    case DataType.CHARARRAY:
        return data;
    default:
        return null;
    }
}
 
Example 7
Source File: OrExpression.java    From spork with Apache License 2.0
@Override
public LogicalSchema.LogicalFieldSchema getFieldSchema() throws FrontendException {
    if (fieldSchema!=null)
        return fieldSchema;
    fieldSchema = new LogicalSchema.LogicalFieldSchema(null, null, DataType.BOOLEAN);
    uidOnlyFieldSchema = fieldSchema.mergeUid(uidOnlyFieldSchema);
    return fieldSchema;
}
 
Example 8
Source File: TOBAG.java    From spork with Apache License 2.0
public TOBAG(String bagColName, String tupleColName, String fieldType) {

    if (bagColName == null || tupleColName == null || fieldType == null) {
        throw new RuntimeException("The bagColName, tupleColName and fieldType cannot be null");
    }

    this.bagColName = bagColName;
    this.tupleColName = tupleColName;

    if (fieldType.equalsIgnoreCase("CHARARRAY")) {
        this.fieldType = DataType.CHARARRAY;

    } else if (fieldType.equalsIgnoreCase("DOUBLE")) {
        this.fieldType = DataType.DOUBLE;

    } else if (fieldType.equalsIgnoreCase("FLOAT")) {
        this.fieldType = DataType.FLOAT;

    } else if (fieldType.equalsIgnoreCase("BOOLEAN")) {
        this.fieldType = DataType.BOOLEAN;

    } else if (fieldType.equalsIgnoreCase("INTEGER")) {
        this.fieldType = DataType.INTEGER;

    } else if (fieldType.equalsIgnoreCase("LONG")) {
        this.fieldType = DataType.LONG;

    } else if (fieldType.equalsIgnoreCase("MAP")) {
        this.fieldType = DataType.MAP;
    } else {
        throw new RuntimeException("This type " + fieldType + " is not supported in TOBAG");
    }

}
 
Example 9
Source File: HDataType.java    From spork with Apache License 2.0
public static byte findTypeFromNullableWritable(PigNullableWritable o) throws ExecException {
    if (o instanceof NullableBooleanWritable)
        return DataType.BOOLEAN;
    else if (o instanceof NullableBytesWritable)
        return DataType.BYTEARRAY;
    else if (o instanceof NullableText)
        return DataType.CHARARRAY;
    else if (o instanceof NullableFloatWritable)
        return DataType.FLOAT;
    else if (o instanceof NullableDoubleWritable)
        return DataType.DOUBLE;
    else if (o instanceof NullableIntWritable)
        return DataType.INTEGER;
    else if (o instanceof NullableLongWritable)
        return DataType.LONG;
    else if (o instanceof NullableBigIntegerWritable)
        return DataType.BIGINTEGER;
    else if (o instanceof NullableBigDecimalWritable)
        return DataType.BIGDECIMAL;
    else if (o instanceof NullableDateTimeWritable)
        return DataType.DATETIME;
    else if (o instanceof NullableBag)
        return DataType.BAG;
    else if (o instanceof NullableTuple)
        return DataType.TUPLE;
    else {
        int errCode = 2044;
        String msg = "Cannot find Pig type for " + o.getClass().getName();
        throw new ExecException(msg, errCode, PigException.BUG);
    }
}
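A hedged sketch of exercising the method above with a boolean value. The package names and the NullableBooleanWritable(boolean) constructor are assumptions based on Pig's usual source layout; verify them against your Pig version.

import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.HDataType;
import org.apache.pig.data.DataType;
import org.apache.pig.impl.io.NullableBooleanWritable;

public class NullableWritableSketch {
    public static void main(String[] args) throws ExecException {
        // Assumes a boolean-valued constructor on NullableBooleanWritable.
        byte t = HDataType.findTypeFromNullableWritable(new NullableBooleanWritable(true));
        System.out.println(t == DataType.BOOLEAN);  // expected: true
    }
}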
 
Example 10
Source File: NotEqualExpression.java    From spork with Apache License 2.0
@Override
public LogicalSchema.LogicalFieldSchema getFieldSchema() throws FrontendException {
    if (fieldSchema!=null)
        return fieldSchema;
    fieldSchema = new LogicalSchema.LogicalFieldSchema(null, null, DataType.BOOLEAN);
    uidOnlyFieldSchema = fieldSchema.mergeUid(uidOnlyFieldSchema);
    return fieldSchema;
}
 
Example 11
Source File: TypeUtil.java    From phoenix with Apache License 2.0
/**
 * Transforms the PhoenixRecord to Pig {@link Tuple}.
 * @param record
 * @param projectedColumns
 * @return
 * @throws IOException
 */
public static Tuple transformToTuple(final PhoenixPigDBWritable record, final ResourceFieldSchema[] projectedColumns) throws IOException {
    
    List<Object> columnValues = record.getValues();
    if(columnValues == null || columnValues.size() == 0 || projectedColumns == null || projectedColumns.length != columnValues.size()) {
        return null;
    }
    int columns = columnValues.size();
    Tuple tuple = TupleFactory.getInstance().newTuple(columns);
    try {
        for(int i = 0 ; i < columns ; i++) {
            final ResourceFieldSchema fieldSchema = projectedColumns[i];
            Object object = columnValues.get(i);
            if (object == null) {
                tuple.set(i, null);
                continue;
            }
            
            switch(fieldSchema.getType()) {
                case DataType.BYTEARRAY:
                    byte[] bytes = PDataType.fromTypeId(PBinary.INSTANCE.getSqlType()).toBytes(object);
                    tuple.set(i,new DataByteArray(bytes,0,bytes.length));
                    break;
                case DataType.CHARARRAY:
                    tuple.set(i,DataType.toString(object));
                    break;
                case DataType.DOUBLE:
                    tuple.set(i,DataType.toDouble(object));
                    break;
                case DataType.FLOAT:
                    tuple.set(i,DataType.toFloat(object));
                    break;
                case DataType.INTEGER:
                    tuple.set(i,DataType.toInteger(object));
                    break;
                case DataType.LONG:
                    tuple.set(i,DataType.toLong(object));
                    break;
                case DataType.BOOLEAN:
                    tuple.set(i,DataType.toBoolean(object));
                    break;
                case DataType.DATETIME:
                    tuple.set(i,DataType.toDateTime(object));
                    break;
                default:
                    throw new RuntimeException(String.format(" Not supported [%s] pig type" , fieldSchema));
            }
        }
    } catch (Exception ex) {
        final String errorMsg = String.format(" Error transforming PhoenixRecord to Tuple [%s] ", ex.getMessage());
        LOG.error(errorMsg);
        throw new PigException(errorMsg);
    }
    return tuple;
}
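The DataType.toBoolean(...) call used above belongs to Pig's generic conversion helpers. A minimal sketch of calling it directly is below; it only shows the trivial Boolean pass-through case, since behaviour for other input types varies by Pig version.

import org.apache.pig.data.DataType;

public class ToBooleanSketch {
    public static void main(String[] args) throws Exception {
        Object value = Boolean.TRUE;  // e.g. a column value read from storage
        if (DataType.findType(value) == DataType.BOOLEAN) {
            Boolean b = DataType.toBoolean(value);  // pass-through for Boolean inputs
            System.out.println(b);  // expected: true
        }
    }
}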
 
Example 12
Source File: TestPackage.java    From spork with Apache License 2.0
/**
 * To show that it does not have any type specific
 * code
 */
private void pickTest(byte t, boolean[] inner) throws ExecException, IOException {
    Random r = new Random();
    switch (t) {
    case DataType.BAG:
        runTest(GenRandomData.genRandSmallTupDataBag(r, 10, 100), inner, DataType.BAG);
        break;
    case DataType.BOOLEAN:
        runTest(r.nextBoolean(), inner, DataType.BOOLEAN);
        break;
    case DataType.BYTEARRAY:
        runTest(GenRandomData.genRandDBA(r), inner, DataType.BYTEARRAY);
        break;
    case DataType.BIGCHARARRAY: {
        String s = GenRandomData.genRandString(r);
        for (; s.length() < 65535;) {
            s += GenRandomData.genRandString(r);
        }
        runTest(s, inner, DataType.CHARARRAY);
        break;
    }
    case DataType.CHARARRAY:
        runTest(GenRandomData.genRandString(r), inner, DataType.CHARARRAY);
        break;
    case DataType.DOUBLE:
        runTest(r.nextDouble(), inner, DataType.DOUBLE);
        break;
    case DataType.FLOAT:
        runTest(r.nextFloat(), inner, DataType.FLOAT);
        break;
    case DataType.INTEGER:
        runTest(r.nextInt(), inner, DataType.INTEGER);
        break;
    case DataType.LONG:
        runTest(r.nextLong(), inner, DataType.LONG);
        break;
    case DataType.DATETIME:
        runTest(new DateTime(r.nextLong()), inner, DataType.DATETIME);
        break;
    case DataType.MAP:
    case DataType.INTERNALMAP:
    case DataType.BYTE:
        return; // map not key type
    case DataType.TUPLE:
        runTest(GenRandomData.genRandSmallBagTuple(r, 10, 100), inner, DataType.TUPLE);
        break;
    case DataType.BIGINTEGER:
        runTest(new BigInteger(256, r), inner, DataType.BIGINTEGER);
        break;
    case DataType.BIGDECIMAL:
        runTest(new BigDecimal(r.nextDouble()), inner, DataType.BIGDECIMAL);
        break;
    default:
        fail("No test case for type " + DataType.findTypeName(t));
    }
}
 
Example 13
Source File: TupleConverter.java    From parquet-mr with Apache License 2.0
static Converter newConverter(FieldSchema pigField, Type type, final ParentValueContainer parent, boolean elephantBirdCompatible, boolean columnIndexAccess) {
  try {
    switch (pigField.type) {
    case DataType.BAG:
      return new BagConverter(type.asGroupType(), pigField, parent, elephantBirdCompatible, columnIndexAccess);
    case DataType.MAP:
      return new MapConverter(type.asGroupType(), pigField, parent, elephantBirdCompatible, columnIndexAccess);
    case DataType.TUPLE:
      return new TupleConverter(type.asGroupType(), pigField.schema, elephantBirdCompatible, columnIndexAccess) {
        @Override
        public void end() {
          super.end();
          parent.add(this.currentTuple);
        }
      };
    case DataType.CHARARRAY:
        //If the original type isn't a string, we don't want to use the dictionary because
        //a custom implementation will be needed for each type.  Just default to no dictionary.
      return new FieldStringConverter(parent, type.getLogicalTypeAnnotation() instanceof LogicalTypeAnnotation.StringLogicalTypeAnnotation);
    case DataType.BYTEARRAY:
      return new FieldByteArrayConverter(parent);
    case DataType.INTEGER:
      return new FieldIntegerConverter(parent);
    case DataType.BOOLEAN:
      if (elephantBirdCompatible) {
        return new FieldIntegerConverter(parent);
      } else {
        return new FieldBooleanConverter(parent);
      }
    case DataType.FLOAT:
      return new FieldFloatConverter(parent);
    case DataType.DOUBLE:
      return new FieldDoubleConverter(parent);
    case DataType.LONG:
      return new FieldLongConverter(parent);
    case DataType.BIGDECIMAL:
      return new FieldBigDecimalConverter(type, parent);
    default:
      throw new TupleConversionException("unsupported pig type: " + pigField);
    }
  } catch (FrontendException | RuntimeException e) {
    throw new TupleConversionException(
        "error while preparing converter for:\n" + pigField + "\n" + type, e);
  }
}
 
Example 14
Source File: VespaQuerySchema.java    From vespa with Apache License 2.0
public Tuple buildTuple(int rank, JsonNode hit) {
    Tuple tuple = TupleFactory.getInstance().newTuple();

    for (VespaQuerySchema.AliasTypePair tupleElement : tupleSchema) {
        String alias = tupleElement.getAlias();
        Byte type = DataType.findTypeByName(tupleElement.getType());

        // reserved word
        if ("rank".equals(alias)) {
            tuple.append(rank);
        } else {
            JsonNode field = hit;
            String[] path = alias.split("/"); // move outside
            for (String p : path) {
                field = field.get(p);
                if (field == null) {
                    type = DataType.NULL; // effectively skip field as it is not found
                    break;
                }
            }
            switch (type) {
                case DataType.BOOLEAN:
                    tuple.append(field.asBoolean());
                    break;
                case DataType.INTEGER:
                    tuple.append(field.asInt());
                    break;
                case DataType.LONG:
                    tuple.append(field.asLong());
                    break;
                case DataType.FLOAT:
                case DataType.DOUBLE:
                    tuple.append(field.asDouble());
                    break;
                case DataType.DATETIME:
                    tuple.append(field.asText());
                    break;
                case DataType.CHARARRAY:
                    tuple.append(field.asText());
                    break;
                default:
                    // the rest of the data types are currently not supported
            }
        }
    }
    return tuple;
}
 
Example 15
Source File: LogicalSchema.java    From spork with Apache License 2.0
/**
 * Check if FieldSchema inFs is castable to outFs
 * @param inFs
 * @param outFs
 * @return true if it is castable
 */
public static boolean castable(LogicalFieldSchema inFs,
        LogicalFieldSchema outFs) {
    
    if(outFs == null && inFs == null) {
        return false;
    }
    
    if (outFs == null) {
        return false ;
    }
    
    if (inFs == null) {
        return false ;
    }
    byte inType = inFs.type;
    byte outType = outFs.type;
    
    if (DataType.isSchemaType(outFs.type)) {
        if(inType == DataType.BYTEARRAY) {
            // good
        } else if (inType == outType) {
            // Don't do the comparison if either input inner schema 
            // is null/empty or  both inner schemas are
            // null.  That will cause Schema.equals to return false,
            // even though we want to view that as true.
            if (!(inFs.schema == null || inFs.schema.size() == 0 || 
                    (outFs.schema == null && inFs.schema == null))) { 
                // compare recursively using schema
                if (!LogicalSchema.castable(inFs.schema, outFs.schema)) {
                    return false ;
                }
            }
        } else {
            return false;
        }
    } else {
        if (inType == outType) {
            // good
        }
        else if (inType == DataType.BOOLEAN && (outType == DataType.CHARARRAY
                || outType == DataType.BYTEARRAY || DataType.isNumberType(outType))) {
            // good
        }
        else if (DataType.isNumberType(inType) && (outType == DataType.CHARARRAY
                || outType == DataType.BYTEARRAY || DataType.isNumberType(outType))
                || outType == DataType.BOOLEAN) {
            // good
        }
        else if (inType == DataType.CHARARRAY && (outType == DataType.BYTEARRAY
                || DataType.isNumberType(outType)) || outType == DataType.BOOLEAN) {
            // good
        }
        else if (inType == DataType.BYTEARRAY) {
            // good
        }
        else {
            return false;
        }
    }
    
    return true ;
}
 
Example 16
Source File: TezDagBuilder.java    From spork with Apache License 2.0
private static Class<? extends WritableComparator> getGroupingComparatorForKeyType(byte keyType)
        throws JobCreationException {

    switch (keyType) {
    case DataType.BOOLEAN:
        return PigGroupingBooleanWritableComparator.class;

    case DataType.INTEGER:
        return PigGroupingIntWritableComparator.class;

    case DataType.BIGINTEGER:
        return PigGroupingBigIntegerWritableComparator.class;

    case DataType.BIGDECIMAL:
        return PigGroupingBigDecimalWritableComparator.class;

    case DataType.LONG:
        return PigGroupingLongWritableComparator.class;

    case DataType.FLOAT:
        return PigGroupingFloatWritableComparator.class;

    case DataType.DOUBLE:
        return PigGroupingDoubleWritableComparator.class;

    case DataType.DATETIME:
        return PigGroupingDateTimeWritableComparator.class;

    case DataType.CHARARRAY:
        return PigGroupingCharArrayWritableComparator.class;

    case DataType.BYTEARRAY:
        return PigGroupingDBAWritableComparator.class;

    case DataType.MAP:
        int errCode = 1068;
        String msg = "Using Map as key not supported.";
        throw new JobCreationException(msg, errCode, PigException.INPUT);

    case DataType.TUPLE:
        return PigGroupingTupleWritableComparator.class;

    case DataType.BAG:
        errCode = 1068;
        msg = "Using Bag as key not supported.";
        throw new JobCreationException(msg, errCode, PigException.INPUT);

    default:
        errCode = 2036;
        msg = "Unhandled key type " + DataType.findTypeName(keyType);
        throw new JobCreationException(msg, errCode, PigException.BUG);
    }
}
 
Example 17
Source File: TezDagBuilder.java    From spork with Apache License 2.0
private static Class<? extends WritableComparator> comparatorForKeyType(byte keyType, boolean hasOrderBy)
        throws JobCreationException {

    switch (keyType) {
    case DataType.BOOLEAN:
        return PigBooleanRawComparator.class;

    case DataType.INTEGER:
        return PigIntRawComparator.class;

    case DataType.BIGINTEGER:
        return PigBigIntegerRawComparator.class;

    case DataType.BIGDECIMAL:
        return PigBigDecimalRawComparator.class;

    case DataType.LONG:
        return PigLongRawComparator.class;

    case DataType.FLOAT:
        return PigFloatRawComparator.class;

    case DataType.DOUBLE:
        return PigDoubleRawComparator.class;

    case DataType.DATETIME:
        return PigDateTimeRawComparator.class;

    case DataType.CHARARRAY:
        return PigTextRawComparator.class;

    case DataType.BYTEARRAY:
        //if (hasOrderBy) {
            return PigBytesRawComparator.class;
        //} else {
        //    return PigDBAWritableComparator.class;
        //}

    case DataType.MAP:
        int errCode = 1068;
        String msg = "Using Map as key not supported.";
        throw new JobCreationException(msg, errCode, PigException.INPUT);

    case DataType.TUPLE:
        //TODO: PigTupleWritableComparator gives wrong results with cogroup in
        //Checkin_2 and few other e2e tests. But MR has PigTupleWritableComparator
        //Investigate the difference later
        //if (hasOrderBy) {
            return PigTupleSortComparator.class;
        //} else {
        //    return PigTupleWritableComparator.class;
        //}

    case DataType.BAG:
        errCode = 1068;
        msg = "Using Bag as key not supported.";
        throw new JobCreationException(msg, errCode, PigException.INPUT);

    default:
        errCode = 2036;
        msg = "Unhandled key type " + DataType.findTypeName(keyType);
        throw new JobCreationException(msg, errCode, PigException.BUG);
    }
}
 
Example 18
Source File: IsDouble.java    From spork with Apache License 2.0
@Override
public Schema outputSchema(Schema input) {
    return new Schema(new Schema.FieldSchema(null, DataType.BOOLEAN)); 
}
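For context, the same outputSchema pattern is what a complete boolean-valued UDF looks like end to end; the sketch below is hypothetical (the class name IsPositive and the positive-number check are invented), but it declares its single BOOLEAN result field exactly as IsDouble does above.

import java.io.IOException;
import org.apache.pig.FilterFunc;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;

public class IsPositive extends FilterFunc {
    @Override
    public Boolean exec(Tuple input) throws IOException {
        if (input == null || input.size() == 0 || input.get(0) == null) {
            return null;
        }
        // Illustrative check only: true when the first field is a positive number.
        return ((Number) input.get(0)).doubleValue() > 0;
    }

    @Override
    public Schema outputSchema(Schema input) {
        // Boolean-valued UDFs advertise a single BOOLEAN field.
        return new Schema(new Schema.FieldSchema(null, DataType.BOOLEAN));
    }
}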
 
Example 19
Source File: AbstractAccumuloStorage.java    From spork with Apache License 2.0
@SuppressWarnings("unchecked")
protected byte[] objToBytes(Object o, byte type) throws IOException {
    if (o == null)
        return null;
    switch (type) {
    case DataType.BYTEARRAY:
        return ((DataByteArray) o).get();
    case DataType.BAG:
        return caster.toBytes((DataBag) o);
    case DataType.CHARARRAY:
        return caster.toBytes((String) o);
    case DataType.DOUBLE:
        return caster.toBytes((Double) o);
    case DataType.FLOAT:
        return caster.toBytes((Float) o);
    case DataType.INTEGER:
        return caster.toBytes((Integer) o);
    case DataType.LONG:
        return caster.toBytes((Long) o);
    case DataType.BIGINTEGER:
        return caster.toBytes((BigInteger) o);
    case DataType.BIGDECIMAL:
        return caster.toBytes((BigDecimal) o);
    case DataType.BOOLEAN:
        return caster.toBytes((Boolean) o);
    case DataType.DATETIME:
        return caster.toBytes((DateTime) o);

        // The type conversion here is unchecked.
        // Relying on DataType.findType to do the right thing.
    case DataType.MAP:
        return caster.toBytes((Map<String, Object>) o);

    case DataType.NULL:
        return null;
    case DataType.TUPLE:
        return caster.toBytes((Tuple) o);
    case DataType.ERROR:
        throw new IOException("Unable to determine type of " + o.getClass());
    default:
        throw new IOException("Unable to find a converter for tuple field "
                + o);
    }
}
 
Example 20
Source File: Schema.java    From spork with Apache License 2.0
/**
 * Recursively compare two schemas to check if the input schema
 * can be cast to the cast schema
 * @param castFs schema of the cast operator
 * @param inputFs schema of the cast input
 * @return true or false
 */
public static boolean castable(
        Schema.FieldSchema castFs,
        Schema.FieldSchema inputFs) {
    if(castFs == null && inputFs == null) {
        return false;
    }
    
    if (castFs == null) {
        return false ;
    }
    
    if (inputFs == null) {
        return false ;
    }
    byte inputType = inputFs.type;
    byte castType = castFs.type;
    
    if (DataType.isSchemaType(castFs.type)) {
        if(inputType == DataType.BYTEARRAY) {
            // good
        } else if (inputType == castType) {
            // Don't do the comparison if both embedded schemas are
            // null.  That will cause Schema.equals to return false,
            // even though we want to view that as true.
            if (!(castFs.schema == null && inputFs.schema == null)) { 
                // compare recursively using schema
                if (!Schema.castable(castFs.schema, inputFs.schema)) {
                    return false ;
                }
            }
        } else {
            return false;
        }
    } else {
        if (inputType == castType) {
            // good
        }
        else if (inputType == DataType.BOOLEAN && (castType == DataType.CHARARRAY
                || castType == DataType.BYTEARRAY || DataType.isNumberType(castType))) {
            // good
        }
        else if (DataType.isNumberType(inputType) && (castType == DataType.CHARARRAY
                || castType == DataType.BYTEARRAY || DataType.isNumberType(castType)
                || castType == DataType.BOOLEAN || castType == DataType.DATETIME)) {
            // good
        }
        else if (inputType == DataType.DATETIME && (castType == DataType.CHARARRAY
                || castType == DataType.BYTEARRAY || DataType.isNumberType(castType))) {
            // good
        }
        else if (inputType == DataType.CHARARRAY && (castType == DataType.BYTEARRAY
                || DataType.isNumberType(castType) || castType == DataType.BOOLEAN
                || castType == DataType.DATETIME)) {
            // good
        } 
        else if (inputType == DataType.BYTEARRAY) {
            // good
        }
        else {
            return false;
        }
    }
    
    return true ;
}
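A small hedged illustration of those rules (the class name CastableSketch is invented): according to the branch above that accepts a BOOLEAN input against a CHARARRAY target, the check below should come out true.

import org.apache.pig.data.DataType;
import org.apache.pig.impl.logicalLayer.schema.Schema;

public class CastableSketch {
    public static void main(String[] args) throws Exception {
        Schema.FieldSchema boolField = new Schema.FieldSchema(null, DataType.BOOLEAN);
        Schema.FieldSchema charField = new Schema.FieldSchema(null, DataType.CHARARRAY);
        // castable(castFs, inputFs): can a BOOLEAN input be cast to a CHARARRAY target?
        System.out.println(Schema.castable(charField, boolField));  // expected: true
    }
}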