Java Code Examples for org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory#getPrimitiveJavaObjectInspector()
The following examples show how to use org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory#getPrimitiveJavaObjectInspector().
Each example is taken from an open-source project; the source project, file, and license are noted above the code.
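Before the project examples, here is a minimal, self-contained sketch of the two overloads exercised below: one takes a PrimitiveObjectInspector.PrimitiveCategory constant, the other a PrimitiveTypeInfo (needed for parameterized types such as decimal, char, and varchar). The class name GetPrimitiveJavaObjectInspectorSketch is made up for illustration; the Hive serde2 calls themselves are the ones used throughout the examples.

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hypothetical demo class, not part of any project listed below.
public class GetPrimitiveJavaObjectInspectorSketch {
    public static void main(String[] args) {
        // Overload 1: look up a Java object inspector by primitive category.
        ObjectInspector stringOI = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveCategory.STRING);

        // Overload 2: look up by PrimitiveTypeInfo, which carries type parameters
        // (precision/scale for decimal, length for char/varchar).
        PrimitiveTypeInfo decimalTypeInfo = TypeInfoFactory.getDecimalTypeInfo(10, 2);
        ObjectInspector decimalOI =
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(decimalTypeInfo);

        System.out.println(stringOI.getTypeName());   // string
        System.out.println(decimalOI.getTypeName());  // decimal(10,2)
    }
}

Most of the examples below pass either a PrimitiveCategory constant (typical in UDF/UDAF initialization and tests) or a PrimitiveTypeInfo obtained while walking a Hive TypeInfo tree (typical in SerDes).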
Example 1
Source Project: incubator-iotdb File: TsFileSerDe.java License: Apache License 2.0
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws TsFileSerDeException {
    if (!supportedCategories(ti)) {
        throw new TsFileSerDeException("Don't yet support this type: " + ti);
    }
    ObjectInspector result;
    switch (ti.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
            result = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
            break;
        // these types are not supported in TsFile
        case LIST:
        case MAP:
        case STRUCT:
        case UNION:
            throw new TsFileSerDeException("The type is not supported in TsFile: " + ti);
        default:
            throw new TsFileSerDeException("No Hive categories matched: " + ti);
    }
    return result;
}
Example 2
Source Project: dremio-oss File: HiveTestUDFImpls.java License: Apache License 2.0
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException(
            String.format("%s needs 1 argument, got %d", udfName, arguments.length));
    }

    if (arguments[0].getCategory() != Category.PRIMITIVE
            || ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() != inputType) {
        String actual = arguments[0].getCategory()
                + (arguments[0].getCategory() == Category.PRIMITIVE
                    ? "[" + ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() + "]"
                    : "");
        throw new UDFArgumentException(
            String.format("%s only takes primitive type %s, got %s", udfName, inputType, actual));
    }
    argumentOI = arguments[0];
    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(outputType);
}
Example 3
Source Project: hiped2 File: Geoloc.java License: Apache License 2.0
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 2) {
        throw new UDFArgumentLengthException(
            "The function COUNTRY(ip, geolocfile) takes exactly 2 arguments.");
    }

    converters = new ObjectInspectorConverters.Converter[arguments.length];
    for (int i = 0; i < arguments.length; i++) {
        converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
            PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    }

    return PrimitiveObjectInspectorFactory
        .getPrimitiveJavaObjectInspector(PrimitiveObjectInspector.PrimitiveCategory.STRING);
}
Example 4
Source Project: incubator-retired-blur File: BlurObjectInspectorGenerator.java License: Apache License 2.0
private ObjectInspector createObjectInspectorWorker(TypeInfo ti) throws SerDeException {
    switch (ti.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo pti = (PrimitiveTypeInfo) ti;
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(pti);
        case STRUCT:
            StructTypeInfo sti = (StructTypeInfo) ti;
            List<ObjectInspector> ois =
                new ArrayList<ObjectInspector>(sti.getAllStructFieldTypeInfos().size());
            for (TypeInfo typeInfo : sti.getAllStructFieldTypeInfos()) {
                ois.add(createObjectInspectorWorker(typeInfo));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(sti.getAllStructFieldNames(), ois);
        case LIST:
            ListTypeInfo lti = (ListTypeInfo) ti;
            TypeInfo listElementTypeInfo = lti.getListElementTypeInfo();
            return ObjectInspectorFactory.getStandardListObjectInspector(
                createObjectInspectorWorker(listElementTypeInfo));
        default:
            throw new SerDeException("No Hive categories matched for [" + ti + "]");
    }
}
Example 5
Source Project: flink File: HiveInspectors.java License: Apache License 2.0
private static ObjectInspector getObjectInspector(TypeInfo type) {
    switch (type.getCategory()) {
        case PRIMITIVE:
            PrimitiveTypeInfo primitiveType = (PrimitiveTypeInfo) type;
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(primitiveType);
        case LIST:
            ListTypeInfo listType = (ListTypeInfo) type;
            return ObjectInspectorFactory.getStandardListObjectInspector(
                getObjectInspector(listType.getListElementTypeInfo()));
        case MAP:
            MapTypeInfo mapType = (MapTypeInfo) type;
            return ObjectInspectorFactory.getStandardMapObjectInspector(
                getObjectInspector(mapType.getMapKeyTypeInfo()),
                getObjectInspector(mapType.getMapValueTypeInfo()));
        case STRUCT:
            StructTypeInfo structType = (StructTypeInfo) type;
            List<TypeInfo> fieldTypes = structType.getAllStructFieldTypeInfos();
            List<ObjectInspector> fieldInspectors = new ArrayList<ObjectInspector>();
            for (TypeInfo fieldType : fieldTypes) {
                fieldInspectors.add(getObjectInspector(fieldType));
            }
            return ObjectInspectorFactory.getStandardStructObjectInspector(
                structType.getAllStructFieldNames(), fieldInspectors);
        default:
            throw new CatalogException("Unsupported Hive type category " + type.getCategory());
    }
}
Example 6
Source Project: incubator-hivemall File: MajorityVoteUDAF.java License: Apache License 2.0
@Override
public ObjectInspector init(@Nonnull Mode mode, @Nonnull ObjectInspector[] argOIs)
        throws HiveException {
    assert (argOIs.length == 1);
    super.init(mode, argOIs);

    // initialize input
    if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) { // from original data
        this.keyInputOI = asPrimitiveObjectInspector(argOIs[0]);
    } else { // from partial aggregation
        this.partialOI = (StandardMapObjectInspector) argOIs[0];
        this.keyInputOI = asPrimitiveObjectInspector(partialOI.getMapKeyObjectInspector());
        this.counterInputOI = asLongOI(partialOI.getMapValueObjectInspector());
    }
    this.keyOutputOI =
        PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(keyInputOI.getTypeInfo());

    // initialize output
    final ObjectInspector outputOI;
    if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) { // terminatePartial
        outputOI = ObjectInspectorFactory.getStandardMapObjectInspector(keyOutputOI,
            PrimitiveObjectInspectorFactory.javaLongObjectInspector);
    } else { // terminate
        outputOI = keyOutputOI;
    }
    return outputOI;
}
Example 7
Source Project: incubator-hivemall File: FMeasureUDAFTest.java License: Apache License 2.0
private void binarySetUp(Object actual, Object predicted, double beta, String average)
        throws Exception {
    fmeasure = new FMeasureUDAF();
    inputOIs = new ObjectInspector[3];

    String actualClassName = actual.getClass().getName();
    if (actualClassName.equals("java.lang.Integer")) {
        inputOIs[0] = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.INT);
    } else if (actualClassName.equals("java.lang.Boolean")) {
        inputOIs[0] = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN);
    } else if (actualClassName.equals("java.lang.String")) {
        inputOIs[0] = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.STRING);
    }

    String predicatedClassName = predicted.getClass().getName();
    if (predicatedClassName.equals("java.lang.Integer")) {
        inputOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.INT);
    } else if (predicatedClassName.equals("java.lang.Boolean")) {
        inputOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN);
    } else if (predicatedClassName.equals("java.lang.String")) {
        inputOIs[1] = PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
            PrimitiveObjectInspector.PrimitiveCategory.STRING);
    }

    inputOIs[2] = ObjectInspectorUtils.getConstantObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector,
        "-beta " + beta + " -average " + average);

    evaluator = fmeasure.getEvaluator(new SimpleGenericUDAFParameterInfo(inputOIs, false, false));
    agg = (FMeasureUDAF.FMeasureAggregationBuffer) evaluator.getNewAggregationBuffer();
}
Example 8
Source Project: incubator-hivemall File: PLSAPredictUDAFTest.java License: Apache License 2.0
@SuppressWarnings("unchecked") @Test public void testTerminateWithSameTopicProbability() throws Exception { udaf = new PLSAPredictUDAF(); inputOIs = new ObjectInspector[] { PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.STRING), PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.FLOAT), PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.INT), PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.FLOAT), ObjectInspectorUtils.getConstantObjectInspector( PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-topics 2")}; evaluator = udaf.getEvaluator(new SimpleGenericUDAFParameterInfo(inputOIs, false, false)); agg = (PLSAPredictUDAF.PLSAPredictAggregationBuffer) evaluator.getNewAggregationBuffer(); evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs); evaluator.reset(agg); // Assume that all words in a document are NOT in vocabulary that composes a LDA model. // Hence, the document should be assigned to topic #1 (#2) with probability 0.5 (0.5). for (int i = 0; i < words.length; i++) { String word = words[i]; evaluator.iterate(agg, new Object[] {word, 0.f, labels[i], probs[i]}); } // Probability for each of the two topics should be same. List<Object[]> result = (List<Object[]>) evaluator.terminate(agg); Assert.assertEquals(result.size(), 2); Assert.assertEquals(result.get(0)[1], result.get(1)[1]); }
Example 9
Source Project: incubator-hivemall File: LDAPredictUDAFTest.java License: Apache License 2.0
@SuppressWarnings("unchecked") @Test public void testTerminateWithSameTopicProbability() throws Exception { udaf = new LDAPredictUDAF(); inputOIs = new ObjectInspector[] { PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.STRING), PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.FLOAT), PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.INT), PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector( PrimitiveObjectInspector.PrimitiveCategory.FLOAT), ObjectInspectorUtils.getConstantObjectInspector( PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-topics 2")}; evaluator = udaf.getEvaluator(new SimpleGenericUDAFParameterInfo(inputOIs, false, false)); agg = (LDAPredictUDAF.OnlineLDAPredictAggregationBuffer) evaluator.getNewAggregationBuffer(); evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs); evaluator.reset(agg); // Assume that all words in a document are NOT in vocabulary that composes a LDA model. // Hence, the document should be assigned to topic #1 (#2) with probability 0.5 (0.5). for (int i = 0; i < 18; i++) { evaluator.iterate(agg, new Object[] {words[i], 0.f, labels[i], lambdas[i]}); } // Probability for each of the two topics should be same. List<Object[]> result = (List<Object[]>) evaluator.terminate(agg); Assert.assertEquals(result.size(), 2); Assert.assertEquals(result.get(0)[1], result.get(1)[1]); }
Example 10
Source Project: Hive-XML-SerDe File: XmlObjectInspectorFactory.java License: Apache License 2.0
/**
 * Returns the standard java object inspector
 *
 * @param typeInfo the type info
 * @param xmlProcessor the XML processor
 * @return the standard java object inspector
 */
public static ObjectInspector getStandardJavaObjectInspectorFromTypeInfo(TypeInfo typeInfo,
        XmlProcessor xmlProcessor) {
    switch (typeInfo.getCategory()) {
        case PRIMITIVE: {
            return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory());
        }
        case LIST: {
            ObjectInspector listElementObjectInspector = getStandardJavaObjectInspectorFromTypeInfo(
                ((ListTypeInfo) typeInfo).getListElementTypeInfo(), xmlProcessor);
            return new XmlListObjectInspector(listElementObjectInspector, xmlProcessor);
        }
        case MAP: {
            MapTypeInfo mapTypeInfo = (MapTypeInfo) typeInfo;
            ObjectInspector mapKeyObjectInspector =
                getStandardJavaObjectInspectorFromTypeInfo(mapTypeInfo.getMapKeyTypeInfo(), xmlProcessor);
            ObjectInspector mapValueObjectInspector =
                getStandardJavaObjectInspectorFromTypeInfo(mapTypeInfo.getMapValueTypeInfo(), xmlProcessor);
            return new XmlMapObjectInspector(mapKeyObjectInspector, mapValueObjectInspector, xmlProcessor);
        }
        case STRUCT: {
            StructTypeInfo structTypeInfo = (StructTypeInfo) typeInfo;
            List<String> structFieldNames = structTypeInfo.getAllStructFieldNames();
            List<TypeInfo> fieldTypeInfos = structTypeInfo.getAllStructFieldTypeInfos();
            List<ObjectInspector> structFieldObjectInspectors =
                new ArrayList<ObjectInspector>(fieldTypeInfos.size());
            for (int fieldIndex = 0; fieldIndex < fieldTypeInfos.size(); ++fieldIndex) {
                structFieldObjectInspectors.add(
                    getStandardJavaObjectInspectorFromTypeInfo(fieldTypeInfos.get(fieldIndex), xmlProcessor));
            }
            return getStandardStructObjectInspector(structFieldNames, structFieldObjectInspectors, xmlProcessor);
        }
        default: {
            throw new IllegalStateException();
        }
    }
}
Example 11
Source Project: incubator-hivemall File: AUCUDAFTest.java License: Apache License 2.0
@Before
public void setUp() throws Exception {
    auc = new AUCUDAF();

    inputOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.DOUBLE),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.INT)};

    evaluator = auc.getEvaluator(new SimpleGenericUDAFParameterInfo(inputOIs, false, false));

    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("indexScore");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    fieldNames.add("area");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    fieldNames.add("fp");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("tp");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("fpPrev");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("tpPrev");
    fieldOIs.add(PrimitiveObjectInspectorFactory.writableLongObjectInspector);

    MapObjectInspector areaPartialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector);
    fieldNames.add("areaPartialMap");
    fieldOIs.add(areaPartialMapOI);

    MapObjectInspector fpPartialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("fpPartialMap");
    fieldOIs.add(fpPartialMapOI);

    MapObjectInspector tpPartialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("tpPartialMap");
    fieldOIs.add(tpPartialMapOI);

    MapObjectInspector fpPrevPartialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("fpPrevPartialMap");
    fieldOIs.add(fpPrevPartialMapOI);

    MapObjectInspector tpPrevPartialMapOI = ObjectInspectorFactory.getStandardMapObjectInspector(
        PrimitiveObjectInspectorFactory.writableDoubleObjectInspector,
        PrimitiveObjectInspectorFactory.writableLongObjectInspector);
    fieldNames.add("tpPrevPartialMap");
    fieldOIs.add(tpPrevPartialMapOI);

    partialOI = new ObjectInspector[2];
    partialOI[0] = ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);

    agg = (AUCUDAF.ClassificationAUCAggregationBuffer) evaluator.getNewAggregationBuffer();
}
Example 12
Source Project: incubator-hivemall File: PLSAPredictUDAFTest.java License: Apache License 2.0
@Test
public void testMerge() throws Exception {
    udaf = new PLSAPredictUDAF();

    inputOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.STRING),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.FLOAT),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.INT),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.FLOAT),
            ObjectInspectorUtils.getConstantObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-topics 2")};

    evaluator = udaf.getEvaluator(new SimpleGenericUDAFParameterInfo(inputOIs, false, false));
    agg = (PLSAPredictUDAF.PLSAPredictAggregationBuffer) evaluator.getNewAggregationBuffer();

    final Map<String, Float> doc = new HashMap<String, Float>();
    doc.put("apples", 1.f);
    doc.put("avocados", 1.f);
    doc.put("colds", 1.f);
    doc.put("flu", 1.f);
    doc.put("like", 2.f);
    doc.put("oranges", 1.f);

    Object[] partials = new Object[3];

    // bin #1
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 0; i < 6; i++) {
        evaluator.iterate(agg, new Object[] {words[i], doc.get(words[i]), labels[i], probs[i]});
    }
    partials[0] = evaluator.terminatePartial(agg);

    // bin #2
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 6; i < 12; i++) {
        evaluator.iterate(agg, new Object[] {words[i], doc.get(words[i]), labels[i], probs[i]});
    }
    partials[1] = evaluator.terminatePartial(agg);

    // bin #3
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 12; i < 18; i++) {
        evaluator.iterate(agg, new Object[] {words[i], doc.get(words[i]), labels[i], probs[i]});
    }
    partials[2] = evaluator.terminatePartial(agg);

    // merge in a different order
    final int[][] orders = new int[][] {{0, 1, 2}, {1, 0, 2}, {1, 2, 0}, {2, 1, 0}};
    for (int i = 0; i < orders.length; i++) {
        evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL2, partialOI);
        evaluator.reset(agg);

        evaluator.merge(agg, partials[orders[i][0]]);
        evaluator.merge(agg, partials[orders[i][1]]);
        evaluator.merge(agg, partials[orders[i][2]]);

        float[] distr = agg.get();
        Assert.assertTrue(distr[0] < distr[1]);
    }
}
Example 13
Source Project: incubator-hivemall File: LDAPredictUDAFTest.java License: Apache License 2.0
@Test
public void testMerge() throws Exception {
    udaf = new LDAPredictUDAF();

    inputOIs = new ObjectInspector[] {
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.STRING),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.FLOAT),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.INT),
            PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(
                PrimitiveObjectInspector.PrimitiveCategory.FLOAT),
            ObjectInspectorUtils.getConstantObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector, "-topics 2")};

    evaluator = udaf.getEvaluator(new SimpleGenericUDAFParameterInfo(inputOIs, false, false));
    agg = (LDAPredictUDAF.OnlineLDAPredictAggregationBuffer) evaluator.getNewAggregationBuffer();

    final Map<String, Float> doc = new HashMap<String, Float>();
    doc.put("apples", 1.f);
    doc.put("avocados", 1.f);
    doc.put("colds", 1.f);
    doc.put("flu", 1.f);
    doc.put("like", 2.f);
    doc.put("oranges", 1.f);

    Object[] partials = new Object[3];

    // bin #1
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 0; i < 6; i++) {
        evaluator.iterate(agg, new Object[] {words[i], doc.get(words[i]), labels[i], lambdas[i]});
    }
    partials[0] = evaluator.terminatePartial(agg);

    // bin #2
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 6; i < 12; i++) {
        evaluator.iterate(agg, new Object[] {words[i], doc.get(words[i]), labels[i], lambdas[i]});
    }
    partials[1] = evaluator.terminatePartial(agg);

    // bin #3
    evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL1, inputOIs);
    evaluator.reset(agg);
    for (int i = 12; i < 18; i++) {
        evaluator.iterate(agg, new Object[] {words[i], doc.get(words[i]), labels[i], lambdas[i]});
    }
    partials[2] = evaluator.terminatePartial(agg);

    // merge in a different order
    final int[][] orders = new int[][] {{0, 1, 2}, {1, 0, 2}, {1, 2, 0}, {2, 1, 0}};
    for (int i = 0; i < orders.length; i++) {
        evaluator.init(GenericUDAFEvaluator.Mode.PARTIAL2, partialOI);
        evaluator.reset(agg);

        evaluator.merge(agg, partials[orders[i][0]]);
        evaluator.merge(agg, partials[orders[i][1]]);
        evaluator.merge(agg, partials[orders[i][2]]);

        float[] distr = agg.get();
        Assert.assertTrue(distr[0] < distr[1]);
    }
}
Example 14
Source Project: Cobol-to-Hive File: CobolNumberField.java License: Apache License 2.0
@Override
public Object deserialize(byte[] rowBytes) throws CobolSerdeException {
    byte[] temp = transcodeField(super.getBytes(rowBytes));
    String s1 = new String(temp);
    if (this.compType > 0) {
        if (this.compType == 3) {
            s1 = unpackData(super.getBytes(rowBytes), this.decimalLocation);
        } else if (this.compType == 4) {
            s1 = getBinary(super.getBytes(rowBytes), this.decimalLocation);
        }
    }
    //} else if (this.decimalLocation > 0) {
    else {
        // Now calling unpackSign on all numeric fields for which compType resolves to 0.
        //
        // The function will check to see if the least significant byte has been overpunched
        // with a sign and return a negative number if a negative sign is found.
        s1 = unpackSign(super.getBytes(rowBytes), this.decimalLocation);
    }
    // else if (this.decimalLocation > 0) {
    //     s1 = s1.substring(0, this.length * this.divideFactor - this.decimalLocation)
    //             + "."
    //             + s1.substring(this.length * this.divideFactor - this.decimalLocation);
    // }

    // System.out.println(name + "\t - " + s1 + "\t:" + offset + "\t@" + length);

    try {
        switch (((PrimitiveTypeInfo) this.typeInfo).getPrimitiveCategory()) {
            case LONG:
                return Long.parseLong(s1.trim());
            case SHORT:
                return Short.parseShort(s1.trim());
            case INT:
                return Integer.parseInt(s1.trim());
            case BYTE:
                return Byte.parseByte(s1.trim());
            case FLOAT:
                return Float.parseFloat(s1.trim());
            case DOUBLE:
                return Double.parseDouble(s1.trim());
            case DECIMAL:
                BigDecimal bd = new BigDecimal(s1);
                HiveDecimal dec = HiveDecimal.create(bd);
                JavaHiveDecimalObjectInspector oi =
                    (JavaHiveDecimalObjectInspector) PrimitiveObjectInspectorFactory
                        .getPrimitiveJavaObjectInspector((DecimalTypeInfo) this.typeInfo);
                return oi.set(null, dec);
        }
    } catch (Exception e) {
        return null; // if cannot be converted make it null
    }
    return null;
}