Java Code Examples for org.apache.pig.data.DataType#FLOAT

The following examples show how to use org.apache.pig.data.DataType#FLOAT. They are taken from open-source projects; the source file, project, and license for each example are noted above its code.
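Before the project examples, here is a minimal, self-contained sketch of the most common pattern they share: switching on a field's declared type byte and handling DataType.FLOAT explicitly. The class and the describe method are invented for illustration and are not part of any project below.

import org.apache.pig.ResourceSchema.ResourceFieldSchema;
import org.apache.pig.data.DataType;

public class FloatFieldInspector {
    // Returns a human-readable note for a single schema field,
    // treating FLOAT (and the other numeric types) explicitly.
    public static String describe(ResourceFieldSchema field) {
        switch (field.getType()) {
        case DataType.FLOAT:
            return field.getName() + " is a 32-bit float";
        case DataType.DOUBLE:
            return field.getName() + " is a 64-bit double";
        case DataType.INTEGER:
        case DataType.LONG:
            return field.getName() + " is an integral type";
        default:
            // DataType.findTypeName maps the byte constant back to its name.
            return field.getName() + " has type " + DataType.findTypeName(field.getType());
        }
    }
}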
Example 1
Source File: SchemaUtil.java    From iceberg with Apache License 2.0
private static byte convertType(Type type) throws IOException {
  switch (type.typeId()) {
    case BOOLEAN:   return DataType.BOOLEAN;
    case INTEGER:   return DataType.INTEGER;
    case LONG:      return DataType.LONG;
    case FLOAT:     return DataType.FLOAT;
    case DOUBLE:    return DataType.DOUBLE;
    case TIMESTAMP: return DataType.CHARARRAY;
    case DATE:      return DataType.CHARARRAY;
    case STRING:    return DataType.CHARARRAY;
    case FIXED:     return DataType.BYTEARRAY;
    case BINARY:    return DataType.BYTEARRAY;
    case DECIMAL:   return DataType.BIGDECIMAL;
    case STRUCT:    return DataType.TUPLE;
    case LIST:      return DataType.BAG;
    case MAP:       return DataType.MAP;
    default:
      throw new FrontendException("Unsupported primitive type:" + type);
  }
}
 
Example 2
Source File: OrcStorage.java    From spork with Apache License 2.0
@Override
public List<String> getPredicateFields(String location, Job job) throws IOException {
    ResourceSchema schema = getSchema(location, job);
    List<String> predicateFields = new ArrayList<String>();
    for (ResourceFieldSchema field : schema.getFields()) {
        switch(field.getType()) {
        case DataType.BOOLEAN:
        case DataType.INTEGER:
        case DataType.LONG:
        case DataType.FLOAT:
        case DataType.DOUBLE:
        case DataType.DATETIME:
        case DataType.CHARARRAY:
        case DataType.BIGINTEGER:
        case DataType.BIGDECIMAL:
            predicateFields.add(field.getName());
            break;
        default:
            // Skip DataType.BYTEARRAY, DataType.TUPLE, DataType.MAP and DataType.BAG
            break;
        }
    }
    return predicateFields;
}
 
Example 3
Source File: DBStorage.java    From spork with Apache License 2.0
protected int sqlDataTypeFromPigDataType(byte pigDataType) {
    switch(pigDataType) {
    case DataType.INTEGER:
        return java.sql.Types.INTEGER;
    case DataType.LONG:
        return java.sql.Types.BIGINT;
    case DataType.FLOAT:
        return java.sql.Types.FLOAT;
    case DataType.DOUBLE:
        return java.sql.Types.DOUBLE;
    case DataType.BOOLEAN:
        return java.sql.Types.BOOLEAN;
    case DataType.DATETIME:
        return java.sql.Types.DATE;
    case DataType.BYTEARRAY:
    case DataType.CHARARRAY:
    case DataType.BYTE:
        return java.sql.Types.VARCHAR;
    default:
        log.warn("Can not find SQL data type for " + pigDataType + " returning VARCHAR");
        return java.sql.Types.VARCHAR;
    }
}
 
Example 4
Source File: PigSchema2Avro.java    From Cubert with Apache License 2.0
/**
 * Convert Pig primitive type to Avro type
 * 
 */
protected static Schema convertPrimitiveType(byte pigType) throws IOException {

    if (pigType == DataType.BOOLEAN) {
        return AvroStorageUtils.BooleanSchema;
    } else if (pigType == DataType.BYTEARRAY) {
        return AvroStorageUtils.BytesSchema;
    } else if (pigType == DataType.CHARARRAY
                                    || pigType == DataType.BIGCHARARRAY) {
        return AvroStorageUtils.StringSchema;
    } else if (pigType == DataType.DOUBLE) {
        return AvroStorageUtils.DoubleSchema;
    } else if (pigType == DataType.FLOAT) {
        return AvroStorageUtils.FloatSchema;
    } else if (pigType == DataType.INTEGER) {
        return AvroStorageUtils.IntSchema;
    } else if (pigType == DataType.LONG) {
        return AvroStorageUtils.LongSchema;
    } else
        throw new IOException("unsupported pig type:"
                                        + DataType.findTypeName(pigType));

}
 
Example 5
Source File: Divide.java    From spork with Apache License 2.0
protected Number divide(Number a, Number b, byte dataType) throws ExecException {
    switch (dataType) {
    case DataType.DOUBLE:
        return Double.valueOf((Double) a / (Double) b);
    case DataType.INTEGER:
        return Integer.valueOf((Integer) a / (Integer) b);
    case DataType.LONG:
        return Long.valueOf((Long) a / (Long) b);
    case DataType.FLOAT:
        return Float.valueOf((Float) a / (Float) b);
    case DataType.BIGINTEGER:
        return ((BigInteger) a).divide((BigInteger) b);
    case DataType.BIGDECIMAL:
        return ((BigDecimal) a).divide((BigDecimal) b);
    default:
        throw new ExecException("called on unsupported Number class " + DataType.findTypeName(dataType));
    }
}
 
Example 6
Source File: TestPOBinCond.java    From spork with Apache License 2.0
@Test
public void testPOBinCondWithFloat() throws  ExecException, PlanException {
   bag= getBag(DataType.FLOAT);
   TestPoBinCondHelper testHelper= new TestPoBinCondHelper(DataType.FLOAT, new Float(1.0f) );

   for(Iterator<Tuple> it = bag.iterator(); it.hasNext(); ) {
       Tuple t = it.next();
       testHelper.getPlan().attachInput(t);
       Float value = (Float) t.get(0);
       int expected = (value.floatValue() == 1.0f )? 1:0 ;
       Integer dummy = new Integer(0);
       Integer result=(Integer)testHelper.getOperator().getNextInteger().result;
       int actual = result.intValue();
       assertEquals( expected, actual );
    }

}
 
Example 7
Source File: TestTypeCheckingValidatorNewLP.java    From spork with Apache License 2.0
@Test
public void testBincond() throws Throwable {
    String query = "a= load 'a' as (name: chararray, age: int, gpa: float);"
    + "b = group a by name;"
    + "c = foreach b generate (IsEmpty(a) ? " + TestBinCondFieldSchema.class.getName() + "(*): a);";

    LOForEach foreach = getForeachFromPlan(query);

    Schema.FieldSchema charFs = new FieldSchema(null, DataType.CHARARRAY);
    Schema.FieldSchema intFs = new FieldSchema(null, DataType.INTEGER);
    Schema.FieldSchema floatFs = new FieldSchema(null, DataType.FLOAT);
    Schema tupleSchema= new Schema();
    tupleSchema.add(charFs);
    tupleSchema.add(intFs);
    tupleSchema.add(floatFs);
    Schema.FieldSchema bagFs = null;
    Schema bagSchema = new Schema();
    bagSchema.add(new FieldSchema(null, tupleSchema, DataType.TUPLE));

    try {
        bagFs = new Schema.FieldSchema(null, bagSchema, DataType.BAG);
    } catch (FrontendException fee) {
        fail("Did not expect an error");
    }

    Schema expectedSchema = new Schema(bagFs);
    Schema foreachSch = org.apache.pig.newplan.logical.Util.translateSchema(foreach.getSchema());
    assertTrue(Schema.equals(foreachSch, expectedSchema, false, true));
}
 
Example 8
Source File: PhysicalOperator.java    From spork with Apache License 2.0
/**
 * Implementations that call into the different versions of getNext are often
 * identical, differing only in the signature of the getNext() call they make.
 * This method helps cut down on some of the copy-and-paste.
 * @param dataType Describes the type of obj; a byte from DataType.
 *
 * @return result Result of applying this Operator to the Object.
 * @throws ExecException
 */
public Result getNext(byte dataType) throws ExecException {
    try {
        switch (dataType) {
        case DataType.BAG:
            return getNextDataBag();
        case DataType.BOOLEAN:
            return getNextBoolean();
        case DataType.BYTEARRAY:
            return getNextDataByteArray();
        case DataType.CHARARRAY:
            return getNextString();
        case DataType.DOUBLE:
            return getNextDouble();
        case DataType.FLOAT:
            return getNextFloat();
        case DataType.INTEGER:
            return getNextInteger();
        case DataType.LONG:
            return getNextLong();
        case DataType.BIGINTEGER:
            return getNextBigInteger();
        case DataType.BIGDECIMAL:
            return getNextBigDecimal();
        case DataType.DATETIME:
            return getNextDateTime();
        case DataType.MAP:
            return getNextMap();
        case DataType.TUPLE:
            return getNextTuple();
        default:
            throw new ExecException("Unsupported type for getNext: " + DataType.findTypeName(dataType));
        }
    } catch (RuntimeException e) {
        throw new ExecException("Exception while executing " + this.toString() + ": " + e.toString(), e);
    }
}
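The javadoc in Example 8 describes a type-dispatch helper; the sketch below shows how a caller might use it to pull the next Float out of an operator via getNext(DataType.FLOAT). The FloatResultReader class is an assumption added for illustration and is not part of Pig's PhysicalOperator API.

import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.POStatus;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.Result;
import org.apache.pig.data.DataType;

// Illustrative helper (not part of Pig): reads the next Float produced by an
// operator by dispatching through getNext(DataType.FLOAT).
public final class FloatResultReader {
    public static Float readNextFloat(PhysicalOperator op) throws ExecException {
        Result r = op.getNext(DataType.FLOAT);   // routed to getNextFloat() internally
        if (r.returnStatus == POStatus.STATUS_OK) {
            return (Float) r.result;
        }
        return null; // EOP, NULL or ERR; real callers inspect the status further
    }
}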
 
Example 9
Source File: VARBAG.java    From spork with Apache License 2.0
public VARBAG(String bagColName, String tupleColName, String fieldType) {

    if (bagColName == null || tupleColName == null || fieldType == null) {
        throw new RuntimeException("The bagColName, tupleColName and fieldType cannot be null");
    }

    this.bagColName   = bagColName;
    this.tupleColName = tupleColName;

    if (fieldType.equalsIgnoreCase("CHARARRAY")) {
        this.fieldType = DataType.CHARARRAY;
    } else if (fieldType.equalsIgnoreCase("DOUBLE")) {
        this.fieldType = DataType.DOUBLE;
    } else if (fieldType.equalsIgnoreCase("FLOAT")) {
        this.fieldType = DataType.FLOAT;
    } else if (fieldType.equalsIgnoreCase("BOOLEAN")) {
        this.fieldType = DataType.BOOLEAN;
    } else if (fieldType.equalsIgnoreCase("INTEGER")) {
        this.fieldType = DataType.INTEGER;
    } else if (fieldType.equalsIgnoreCase("LONG")) {
        this.fieldType = DataType.LONG;
    } else if (fieldType.equalsIgnoreCase("MAP")) {
        this.fieldType = DataType.MAP;
    } else {
        throw new RuntimeException("This type " + fieldType + " is not supported in " + this.getClass().getSimpleName());
    }
}
 
Example 10
Source File: TOBAG.java    From spork with Apache License 2.0
public TOBAG(String bagColName, String tupleColName, String fieldType) {

    if (bagColName == null || tupleColName == null || fieldType == null) {
        throw new RuntimeException("The bagColName, tupleColName and fieldType cannot be null");
    }

    this.bagColName   = bagColName;
    this.tupleColName = tupleColName;

    if (fieldType.equalsIgnoreCase("CHARARRAY")) {
        this.fieldType = DataType.CHARARRAY;
    } else if (fieldType.equalsIgnoreCase("DOUBLE")) {
        this.fieldType = DataType.DOUBLE;
    } else if (fieldType.equalsIgnoreCase("FLOAT")) {
        this.fieldType = DataType.FLOAT;
    } else if (fieldType.equalsIgnoreCase("BOOLEAN")) {
        this.fieldType = DataType.BOOLEAN;
    } else if (fieldType.equalsIgnoreCase("INTEGER")) {
        this.fieldType = DataType.INTEGER;
    } else if (fieldType.equalsIgnoreCase("LONG")) {
        this.fieldType = DataType.LONG;
    } else if (fieldType.equalsIgnoreCase("MAP")) {
        this.fieldType = DataType.MAP;
    } else {
        throw new RuntimeException("This type " + fieldType + " is not supported in TOBAG");
    }
}
 
Example 11
Source File: SequenceFileLoader.java    From spork with Apache License 2.0
protected byte inferPigDataType(Type t) {
  if (t == BytesWritable.class) return DataType.BYTEARRAY;
  else if (t == Text.class) return DataType.CHARARRAY;
  else if (t == IntWritable.class) return DataType.INTEGER;
  else if (t == LongWritable.class) return DataType.LONG;
  else if (t == FloatWritable.class) return DataType.FLOAT;
  else if (t == DoubleWritable.class) return DataType.DOUBLE;
  else if (t == BooleanWritable.class) return DataType.BOOLEAN;
  else if (t == ByteWritable.class) return DataType.BYTE;
  else if (t == DateTimeWritable.class) return DataType.DATETIME;
  // not doing maps or other complex types for now
  else return DataType.ERROR;
}
 
Example 12
Source File: HBaseStorage.java    From spork with Apache License 2.0
@SuppressWarnings("unchecked")
private byte[] objToBytes(Object o, byte type) throws IOException {
    LoadStoreCaster caster = (LoadStoreCaster) caster_;
    if (o == null) return null;
    switch (type) {
    case DataType.BYTEARRAY: return ((DataByteArray) o).get();
    case DataType.BAG: return caster.toBytes((DataBag) o);
    case DataType.CHARARRAY: return caster.toBytes((String) o);
    case DataType.DOUBLE: return caster.toBytes((Double) o);
    case DataType.FLOAT: return caster.toBytes((Float) o);
    case DataType.INTEGER: return caster.toBytes((Integer) o);
    case DataType.LONG: return caster.toBytes((Long) o);
    case DataType.BIGINTEGER: return caster.toBytes((BigInteger) o);
    case DataType.BIGDECIMAL: return caster.toBytes((BigDecimal) o);
    case DataType.BOOLEAN: return caster.toBytes((Boolean) o);
    case DataType.DATETIME: return caster.toBytes((DateTime) o);

    // The type conversion here is unchecked.
    // Relying on DataType.findType to do the right thing.
    case DataType.MAP: return caster.toBytes((Map<String, Object>) o);

    case DataType.NULL: return null;
    case DataType.TUPLE: return caster.toBytes((Tuple) o);
    case DataType.ERROR: throw new IOException("Unable to determine type of " + o.getClass());
    default: throw new IOException("Unable to find a converter for tuple field " + o);
    }
}
 
Example 13
Source File: TezDagBuilder.java    From spork with Apache License 2.0
private static Class<? extends WritableComparator> comparatorForKeyType(byte keyType, boolean hasOrderBy)
        throws JobCreationException {

    switch (keyType) {
    case DataType.BOOLEAN:
        return PigBooleanRawComparator.class;

    case DataType.INTEGER:
        return PigIntRawComparator.class;

    case DataType.BIGINTEGER:
        return PigBigIntegerRawComparator.class;

    case DataType.BIGDECIMAL:
        return PigBigDecimalRawComparator.class;

    case DataType.LONG:
        return PigLongRawComparator.class;

    case DataType.FLOAT:
        return PigFloatRawComparator.class;

    case DataType.DOUBLE:
        return PigDoubleRawComparator.class;

    case DataType.DATETIME:
        return PigDateTimeRawComparator.class;

    case DataType.CHARARRAY:
        return PigTextRawComparator.class;

    case DataType.BYTEARRAY:
        //if (hasOrderBy) {
            return PigBytesRawComparator.class;
        //} else {
        //    return PigDBAWritableComparator.class;
        //}

    case DataType.MAP:
        int errCode = 1068;
        String msg = "Using Map as key not supported.";
        throw new JobCreationException(msg, errCode, PigException.INPUT);

    case DataType.TUPLE:
        //TODO: PigTupleWritableComparator gives wrong results with cogroup in
        //Checkin_2 and few other e2e tests. But MR has PigTupleWritableComparator
        //Investigate the difference later
        //if (hasOrderBy) {
            return PigTupleSortComparator.class;
        //} else {
        //    return PigTupleWritableComparator.class;
        //}

    case DataType.BAG:
        errCode = 1068;
        msg = "Using Bag as key not supported.";
        throw new JobCreationException(msg, errCode, PigException.INPUT);

    default:
        errCode = 2036;
        msg = "Unhandled key type " + DataType.findTypeName(keyType);
        throw new JobCreationException(msg, errCode, PigException.BUG);
    }
}
 
Example 14
Source File: TupleConverter.java    From parquet-mr with Apache License 2.0
static Converter newConverter(FieldSchema pigField, Type type, final ParentValueContainer parent, boolean elephantBirdCompatible, boolean columnIndexAccess) {
  try {
    switch (pigField.type) {
    case DataType.BAG:
      return new BagConverter(type.asGroupType(), pigField, parent, elephantBirdCompatible, columnIndexAccess);
    case DataType.MAP:
      return new MapConverter(type.asGroupType(), pigField, parent, elephantBirdCompatible, columnIndexAccess);
    case DataType.TUPLE:
      return new TupleConverter(type.asGroupType(), pigField.schema, elephantBirdCompatible, columnIndexAccess) {
        @Override
        public void end() {
          super.end();
          parent.add(this.currentTuple);
        }
      };
    case DataType.CHARARRAY:
        //If the original type isn't a string, we don't want to use the dictionary because
        //a custom implementation will be needed for each type.  Just default to no dictionary.
      return new FieldStringConverter(parent, type.getLogicalTypeAnnotation() instanceof LogicalTypeAnnotation.StringLogicalTypeAnnotation);
    case DataType.BYTEARRAY:
      return new FieldByteArrayConverter(parent);
    case DataType.INTEGER:
      return new FieldIntegerConverter(parent);
    case DataType.BOOLEAN:
      if (elephantBirdCompatible) {
        return new FieldIntegerConverter(parent);
      } else {
        return new FieldBooleanConverter(parent);
      }
    case DataType.FLOAT:
      return new FieldFloatConverter(parent);
    case DataType.DOUBLE:
      return new FieldDoubleConverter(parent);
    case DataType.LONG:
      return new FieldLongConverter(parent);
    case DataType.BIGDECIMAL:
      return new FieldBigDecimalConverter(type, parent);
    default:
      throw new TupleConversionException("unsupported pig type: " + pigField);
    }
  } catch (FrontendException | RuntimeException e) {
    throw new TupleConversionException(
        "error while preparing converter for:\n" + pigField + "\n" + type, e);
  }
}
 
Example 15
Source File: FixedWidthLoader.java    From spork with Apache License 2.0
private Object readField(String line, ResourceFieldSchema field, FixedWidthField column) 
                         throws IOException, IllegalArgumentException {

    int start = column.start;
    int end = Math.min(column.end, line.length());

    if (start > line.length())
        return null;

    if (end <= start)
        return null;

    String s  = line.substring(start, end);
    String sTrim = s.trim();

    switch (field.getType()) {
        case DataType.UNKNOWN:
        case DataType.BYTEARRAY:
        case DataType.CHARARRAY:
            if (sTrim.length() == 0)
                return null;
            return sTrim;

        case DataType.BOOLEAN:
            return Boolean.parseBoolean(sTrim);

        case DataType.INTEGER:
            return Integer.parseInt(sTrim);

        case DataType.LONG:
            return Long.parseLong(sTrim);

        case DataType.FLOAT:
            return Float.parseFloat(sTrim);
        
        case DataType.DOUBLE:
            return Double.parseDouble(sTrim);

        case DataType.DATETIME:
            return (new DateTime(sTrim)).toDateTime(DateTimeZone.UTC);

        case DataType.MAP:
        case DataType.TUPLE:
        case DataType.BAG:
            throw new IllegalArgumentException("Object types (map, tuple, bag) are not supported by FixedWidthLoader");
        
        default:
            throw new IllegalArgumentException(
                "Unknown type in input schema: " + field.getType());
    }
}
 
Example 16
Source File: TestWarningFunc.java    From spork with Apache License 2.0
public Double exec(Tuple input) throws IOException {
    if (input == null || input.size() == 0) {
        pigLogger.warn(this, "Input is empty.", PigWarning.UDF_WARNING_1);
        return null;
    }

    Double output = null;
    boolean accumulated = false;

    try {
        for (int i = 0; i < input.size(); ++i) {
            Object o = input.get(i);
            byte inputType = DataType.findType(o);
            if (DataType.isNumberType(inputType)) {
                if (!accumulated) {
                    output = 0.0;
                    accumulated = true;
                }
                switch (inputType) {
                case DataType.INTEGER:
                    output += (Integer) o;
                    break;

                case DataType.LONG:
                    output += (Long) o;
                    break;

                case DataType.FLOAT:
                    output += (Float) o;
                    break;

                case DataType.DOUBLE:
                    output += (Double) o;
                    break;
                }

            } else {
                pigLogger.warn(this, "Found a non-numeric type.", PigWarning.UDF_WARNING_3);
            }
        }
    } catch (Exception e) {
        pigLogger.warn(this, "Problem while computing output.", PigWarning.UDF_WARNING_2);
        return null;
    }

    if (!accumulated) {
        pigLogger.warn(this, "Did not find any numeric type in the input.", PigWarning.UDF_WARNING_4);
    }

    return output;
}
 
Example 17
Source File: TestPackage.java    From spork with Apache License 2.0
/**
 * To show that it does not have any type-specific code
 */
private void pickTest(byte t, boolean[] inner) throws ExecException, IOException {
    Random r = new Random();
    switch (t) {
    case DataType.BAG:
        runTest(GenRandomData.genRandSmallTupDataBag(r, 10, 100), inner, DataType.BAG);
        break;
    case DataType.BOOLEAN:
        runTest(r.nextBoolean(), inner, DataType.BOOLEAN);
        break;
    case DataType.BYTEARRAY:
        runTest(GenRandomData.genRandDBA(r), inner, DataType.BYTEARRAY);
        break;
    case DataType.BIGCHARARRAY: {
        String s = GenRandomData.genRandString(r);
        for (; s.length() < 65535;) {
            s += GenRandomData.genRandString(r);
        }
        runTest(s, inner, DataType.CHARARRAY);
        break;
    }
    case DataType.CHARARRAY:
        runTest(GenRandomData.genRandString(r), inner, DataType.CHARARRAY);
        break;
    case DataType.DOUBLE:
        runTest(r.nextDouble(), inner, DataType.DOUBLE);
        break;
    case DataType.FLOAT:
        runTest(r.nextFloat(), inner, DataType.FLOAT);
        break;
    case DataType.INTEGER:
        runTest(r.nextInt(), inner, DataType.INTEGER);
        break;
    case DataType.LONG:
        runTest(r.nextLong(), inner, DataType.LONG);
        break;
    case DataType.DATETIME:
        runTest(new DateTime(r.nextLong()), inner, DataType.DATETIME);
        break;
    case DataType.MAP:
    case DataType.INTERNALMAP:
    case DataType.BYTE:
        return; // map not key type
    case DataType.TUPLE:
        runTest(GenRandomData.genRandSmallBagTuple(r, 10, 100), inner, DataType.TUPLE);
        break;
    case DataType.BIGINTEGER:
        runTest(new BigInteger(256, r), inner, DataType.BIGINTEGER);
        break;
    case DataType.BIGDECIMAL:
        runTest(new BigDecimal(r.nextDouble()), inner, DataType.BIGDECIMAL);
        break;
    default:
        fail("No test case for type " + DataType.findTypeName(t));
    }
}
 
Example 18
Source File: TypeUtil.java    From phoenix with Apache License 2.0
/**
 * Returns the most appropriate PDataType associated with the incoming Pig
 * type. Note that for the Pig DataType DATETIME, DATE is returned as the
 * inferred SQL type.
 *
 * This is later used to make a cast to targetPhoenixType accordingly. See
 * {@link #castPigTypeToPhoenix(Object, byte, PDataType)}
 *
 * @param obj  the value being converted; used only for the null check and the error message
 * @param type the Pig DataType byte of the value
 * @return the corresponding PDataType
 */
public static PDataType getType(Object obj, byte type) {
	if (obj == null) {
		return null;
	}
	PDataType sqlType;

	switch (type) {
	case DataType.BYTEARRAY:
		sqlType = PVarbinary.INSTANCE;
		break;
	case DataType.CHARARRAY:
		sqlType = PVarchar.INSTANCE;
		break;
	case DataType.DOUBLE:
	case DataType.BIGDECIMAL:
		sqlType = PDouble.INSTANCE;
		break;
	case DataType.FLOAT:
		sqlType = PFloat.INSTANCE;
		break;
	case DataType.INTEGER:
		sqlType = PInteger.INSTANCE;
		break;
	case DataType.LONG:
	case DataType.BIGINTEGER:
		sqlType = PLong.INSTANCE;
		break;
	case DataType.BOOLEAN:
		sqlType = PBoolean.INSTANCE;
		break;
	case DataType.DATETIME:
		sqlType = PDate.INSTANCE;
		break;
	case DataType.BYTE:
		sqlType = PTinyint.INSTANCE;
		break;
	default:
		throw new RuntimeException("Unknown type " + obj.getClass().getName()
				+ " passed to PhoenixHBaseStorage");
	}

	return sqlType;

}
 
Example 19
Source File: POPreCombinerLocalRearrange.java    From spork with Apache License 2.0
/**
 * Calls getNext on the generate operator inside the nested
 * physical plan. Converts the generated tuple into the proper
 * format, i.e., (key, indexedTuple(value))
 */
@Override
public Result getNextTuple() throws ExecException {

    Result inp = null;
    Result res = ERR_RESULT;
    while (true) {
        inp = processInput();
        if (inp.returnStatus == POStatus.STATUS_EOP || inp.returnStatus == POStatus.STATUS_ERR) {
            break;
        }
        if (inp.returnStatus == POStatus.STATUS_NULL) {
            continue;
        }

        for (PhysicalPlan ep : plans) {
            ep.attachInput((Tuple)inp.result);
        }
        List<Result> resLst = new ArrayList<Result>();
        for (ExpressionOperator op : leafOps){

            switch(op.getResultType()){
            case DataType.BAG:
            case DataType.BOOLEAN:
            case DataType.BYTEARRAY:
            case DataType.CHARARRAY:
            case DataType.DOUBLE:
            case DataType.FLOAT:
            case DataType.INTEGER:
            case DataType.LONG:
            case DataType.BIGINTEGER:
            case DataType.BIGDECIMAL:
            case DataType.DATETIME:
            case DataType.MAP:
            case DataType.TUPLE:
                res = op.getNext(op.getResultType());
                break;
            default:
                log.error("Invalid result type: "
                        + DataType.findType(op.getResultType()));
                break;
            }

            if (res.returnStatus != POStatus.STATUS_OK) {
                return res;
            }

            resLst.add(res);
        }
        res.result = constructLROutput(resLst,(Tuple)inp.result);
        res.returnStatus = POStatus.STATUS_OK;

        return res;
    }
    return inp;
}
 
Example 20
Source File: FloatAbs.java    From spork with Apache License 2.0
@Override
public Schema outputSchema(Schema input) {
       return new Schema(new Schema.FieldSchema(getSchemaName(this.getClass().getName().toLowerCase(), input), DataType.FLOAT));
}
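Example 20 shows only the outputSchema method. For context, a complete EvalFunc that declares a FLOAT output column might look roughly like the sketch below. This is not the actual FloatAbs source; the class name and exec logic are assumptions added purely to show where a DataType.FLOAT output schema fits.

import java.io.IOException;
import org.apache.pig.EvalFunc;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.logicalLayer.schema.Schema;

// Hypothetical UDF, shown only to illustrate a DataType.FLOAT output schema
// inside an EvalFunc; it is not the real FloatAbs implementation.
public class FloatAbsSketch extends EvalFunc<Float> {

    @Override
    public Float exec(Tuple input) throws IOException {
        if (input == null || input.size() == 0 || input.get(0) == null) {
            return null;
        }
        return Math.abs((Float) input.get(0));
    }

    @Override
    public Schema outputSchema(Schema input) {
        // Declare the single output column as a Pig float.
        return new Schema(new Schema.FieldSchema(
                getSchemaName(this.getClass().getName().toLowerCase(), input),
                DataType.FLOAT));
    }
}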