Java Code Examples for org.apache.hadoop.hive.ql.metadata.HiveException

The following are examples showing how to use org.apache.hadoop.hive.ql.metadata.HiveException. They are extracted from open source projects.
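Before the project examples, here is a minimal, hypothetical sketch of where HiveException usually shows up in a custom GenericUDF: initialize() reports bad arguments through the UDFArgumentException subclasses (which extend HiveException), while evaluate() and DeferredObject.get() declare HiveException directly. The class name and behavior below are illustrative assumptions, not code from any of the listed projects.

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;

// Illustrative UDF (hypothetical): upper-cases a single string argument.
public class UpperCaseUDF extends GenericUDF {

  @Override
  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    // Argument problems are reported via UDFArgumentException, a HiveException subclass.
    if (arguments.length != 1) {
      throw new UDFArgumentLengthException("upper_case() takes exactly one argument");
    }
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  }

  @Override
  public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // DeferredObject.get() itself declares HiveException, so no extra wrapping is needed here.
    Object value = arguments[0].get();
    if (value == null) {
      return null;
    }
    // For brevity this sketch relies on toString(); production UDFs normally go through
    // an ObjectInspector converter, as the examples below do.
    return new Text(value.toString().toUpperCase());
  }

  @Override
  public String getDisplayString(String[] children) {
    return "upper_case(" + children[0] + ")";
  }
}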
Example 1
Project: kafka-connect-hdfs   File: AvroHiveUtil.java
private Table constructAvroTable(String database, String tableName, Schema schema, Partitioner partitioner)
    throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(avroSerde);
  try {
    table.setInputFormatClass(avroInputFormat);
    table.setOutputFormatClass(avroOutputFormat);
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  table.getParameters().put(AVRO_SCHEMA_LITERAL, avroData.fromConnectSchema(schema).toString());
  return table;
}
 
Example 2
Project: kafka-connect-hdfs   File: ParquetHiveUtil.java
private Table constructParquetTable(String database, String tableName, Schema schema, Partitioner partitioner) throws HiveMetaStoreException {
  Table table = newTable(database, tableName);
  table.setTableType(TableType.EXTERNAL_TABLE);
  table.getParameters().put("EXTERNAL", "TRUE");
  String tablePath = FileUtils.hiveDirectoryName(url, topicsDir, tableName);
  table.setDataLocation(new Path(tablePath));
  table.setSerializationLib(getHiveParquetSerde());
  try {
    table.setInputFormatClass(getHiveParquetInputFormat());
    table.setOutputFormatClass(getHiveParquetOutputFormat());
  } catch (HiveException e) {
    throw new HiveMetaStoreException("Cannot find input/output format:", e);
  }
  // convert copycat schema to Hive columns
  List<FieldSchema> columns = HiveSchemaConverter.convertSchema(schema);
  table.setFields(columns);
  table.setPartCols(partitioner.partitionFields());
  return table;
}
 
Example 3
Project: Transwarp-Sample-Code   File: udtfCheck.java
@Override
public void process(Object[] record) throws HiveException {
    final String document = (String) stringOI.getPrimitiveJavaObject(record[0]);

    if (document == null) {
        return;
    }

    String[] tokens = document.split(",");
    String[] results = tokens[1].split(" ");

    try {
        hTable = new HTable(conf, "bi");
        Get get = new Get(Bytes.toBytes(tokens[0]));
        result = hTable.exists(get);
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (!result) {
        for (String r : results) {
            forward(new Object[]{tokens[0], r});
        }
    }
}
 
Example 4
Project: hive-udf-backports   File: GenericUDFNextDay.java
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  int dayOfWeekInt;
  if (isDayOfWeekConst) {
    dayOfWeekInt = dayOfWeekIntConst;
  } else {
    String dayOfWeek = getStringValue(arguments, 1, converters);
    dayOfWeekInt = getIntDayOfWeek(dayOfWeek);
  }
  if (dayOfWeekInt == -1) {
    return null;
  }

  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }

  nextDay(date, dayOfWeekInt);
  Date newDate = calendar.getTime();
  output.set(BackportUtils.getDateFormat().format(newDate));
  return output;
}
 
Example 5
Project: hive-udf-backports   File: GenericUDFDateFormat.java
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (formatter == null) {
    return null;
  }
  // the function should support both short date and full timestamp format
  // time part of the timestamp should not be skipped
  Date date = getTimestampValue(arguments, 0, tsConverters);
  if (date == null) {
    date = getDateValue(arguments, 0, dtInputTypes, dtConverters);
    if (date == null) {
      return null;
    }
  }

  String res = formatter.format(date);
  if (res == null) {
    return null;
  }
  output.set(res);
  return output;
}
 
Example 6
Project: hive-udf-backports   File: GenericUDFSha2.java
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  if (digest == null) {
    return null;
  }

  digest.reset();
  if (isStr) {
    Text n = getTextValue(arguments, 0, converters);
    if (n == null) {
      return null;
    }
    digest.update(n.getBytes(), 0, n.getLength());
  } else {
    BytesWritable bWr = getBinaryValue(arguments, 0, converters);
    if (bWr == null) {
      return null;
    }
    digest.update(bWr.getBytes(), 0, bWr.getLength());
  }
  byte[] resBin = digest.digest();
  String resStr = Hex.encodeHexString(resBin);

  output.set(resStr);
  return output;
}
 
Example 7
Project: hive-udf-backports   File: GenericUDFAddMonths.java
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Integer numMonthV;
  if (isNumMonthsConst) {
    numMonthV = numMonthsConst;
  } else {
    numMonthV = getIntValue(arguments, 1, converters);
  }

  if (numMonthV == null) {
    return null;
  }

  int numMonthInt = numMonthV.intValue();
  Date date = getDateValue(arguments, 0, inputTypes, converters);
  if (date == null) {
    return null;
  }

  addMonth(date, numMonthInt);
  Date newDate = calendar.getTime();
  output.set(getDateFormat().format(newDate));
  return output;
}
 
Example 8
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Byte value = (Byte)columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Byte transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 9
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Date value = columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Date transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 10
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  HiveChar value = columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    String transformedValue = transformer.transform(value.getValue());

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 11
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  HiveVarchar value = columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    String transformedValue = transformer.transform(value.getValue());

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 12
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Integer value = (Integer)columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Integer transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 13
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Long value = (Long)columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Long transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 14
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  Short value = (Short)columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    Short transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 15
Project: hive-udf-backports   File: BaseMaskUDF.java
@Override
public Object getTransformedWritable(DeferredObject object) throws HiveException {
  String value = columnType.getPrimitiveJavaObject(object.get());

  if(value != null) {
    String transformedValue = transformer.transform(value);

    if(transformedValue != null) {
      writable.set(transformedValue);

      return writable;
    }
  }

  return null;
}
 
Example 16
Project: hive-udf-backports   File: GenericUDFSoundex.java
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  Object obj0;
  if ((obj0 = arguments[0].get()) == null) {
    return null;
  }

  String str0 = textConverters[0].convert(obj0).toString();
  String soundexCode;
  try {
    soundexCode = soundex.soundex(str0);
  } catch (IllegalArgumentException e) {
    return null;
  }
  output.set(soundexCode);
  return output;
}
 
Example 17
Project: hive-jq-udtf   File: JsonQueryUDTFTest.java
@Test
public void testSingleColumn2() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
			toConstantOI("struct<name:string,offset:int>"),
	});
	assertEquals("struct<col1:struct<name:string,offset:int>>", oi.getTypeName());

	final List<Object> results = evaluate(sut, toObject(TEST_JSON));
	assertEquals(3, results.size());

	final HivePath namePath = new HivePath(oi, ".col1.name");
	final HivePath offsetPath = new HivePath(oi, ".col1.offset");

	assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
	assertEquals(540, offsetPath.extract(results.get(0)).asInt());

	assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
	assertEquals(480, offsetPath.extract(results.get(1)).asInt());

	assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
	assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
 
Example 18
Project: hive-jq-udtf   File: JsonQueryUDTFTest.java
@Test
public void testMultiColumn() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI(".region as $region | .timezones[] | {name: ($region + \"/\" + .name), offset}"),
			toConstantOI("name:string"),
			toConstantOI("offset:int"),
	});
	assertEquals("struct<name:string,offset:int>", oi.getTypeName());

	final List<Object> results = evaluate(sut, toObject(TEST_JSON));
	assertEquals(3, results.size());

	final HivePath namePath = new HivePath(oi, ".name");
	final HivePath offsetPath = new HivePath(oi, ".offset");

	assertEquals("Asia/Tokyo", namePath.extract(results.get(0)).asString());
	assertEquals(540, offsetPath.extract(results.get(0)).asInt());

	assertEquals("Asia/Taipei", namePath.extract(results.get(1)).asString());
	assertEquals(480, offsetPath.extract(results.get(1)).asInt());

	assertEquals("Asia/Kamchatka", namePath.extract(results.get(2)).asString());
	assertEquals(720, offsetPath.extract(results.get(2)).asInt());
}
 
Example 19
Project: hive-jq-udtf   File: JsonQueryUDTFTest.java
@Test
public void testAbortOnError() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();
	@SuppressWarnings("unused")
	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("if $error then error($error.message) else . end"),
			toConstantOI("string"),
	});

	try {
		evaluate(sut, toObject("\"corrupt \"string"));
		fail("should fail");
	} catch (final HiveException e) {
		assertTrue(e.getMessage().contains("Unrecognized token 'string'"));
	}
}
 
Example 20
Project: hive-jq-udtf   File: JsonQueryUDTFTest.java
@Test
public void testMoreOnStringOutputConversions() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("{foo: {a: 1}, bar: null, baz: \"baz\"}"),
			toConstantOI("foo:string"),
			toConstantOI("bar:string"),
			toConstantOI("baz:string"),
	});

	final List<Object> results = evaluate(sut, toObject("null"));
	assertEquals(1, results.size());

	final Object obj = results.get(0);
	assertEquals("{\"a\":1}", new HivePath(oi, ".foo").extract(obj).asString());
	assertTrue(new HivePath(oi, ".bar").extract(obj).isNull());
	assertEquals("baz", new HivePath(oi, ".baz").extract(obj).asString());
}
 
Example 21
Project: hive-jq-udtf   File: JsonQueryUDTFTest.java
@Test
public void testNullInputs() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("."),
			toConstantOI("string"),
	});

	final List<Object> results = evaluate(sut, toObject("null"), null, toObject(null));
	assertEquals(3, results.size());

	assertTrue(new HivePath(oi, ".col1").extract(results.get(0)).isNull());
	assertTrue(new HivePath(oi, ".col1").extract(results.get(1)).isNull());
	assertTrue(new HivePath(oi, ".col1").extract(results.get(2)).isNull());
}
 
Example 22
Project: hive-jq-udtf   File: JsonQueryUDTFTest.java
@Test
public void testMissingFieldsInConversions() throws HiveException {
	final JsonQueryUDTF sut = new JsonQueryUDTF();

	final StructObjectInspector oi = sut.initialize(new ObjectInspector[] {
			PrimitiveObjectInspectorFactory.writableStringObjectInspector,
			toConstantOI("{foo: 10}"),
			toConstantOI("foo:int"),
			toConstantOI("bar:int"),
	});

	final List<Object> results = evaluate(sut, toObject(null));
	assertEquals(1, results.size());

	assertEquals(10, new HivePath(oi, ".foo").extract(results.get(0)).asInt());
	assertTrue(new HivePath(oi, ".bar").extract(results.get(0)).isNull());
}
 
Example 23
Project: incubator-hivemall   File: CollectAllUDAF.java
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters) throws HiveException {
    super.init(m, parameters);
    if (m == Mode.PARTIAL1) {
        inputOI = parameters[0];
        return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorUtils.getStandardObjectInspector(inputOI));
    } else {
        if (!(parameters[0] instanceof StandardListObjectInspector)) {
            inputOI = ObjectInspectorUtils.getStandardObjectInspector(parameters[0]);
            return (StandardListObjectInspector) ObjectInspectorFactory.getStandardListObjectInspector(inputOI);
        } else {
            internalMergeOI = (StandardListObjectInspector) parameters[0];
            inputOI = internalMergeOI.getListElementObjectInspector();
            loi = (StandardListObjectInspector) ObjectInspectorUtils.getStandardObjectInspector(internalMergeOI);
            return loi;
        }
    }
}
 
Example 24
Project: incubator-hivemall   File: RandomizedAmplifier.java
public void sweepAll() throws HiveException {
    if (position < numBuffers && position > 1) {// shuffle an unfilled buffer
        for (int x = 0; x < xtimes; x++) {
            ArrayUtils.shuffle(slots[x], position, randoms[x]);
        }
    }
    for (int i = 0; i < numBuffers; i++) {
        for (int x = 0; x < xtimes; x++) {
            AgedObject<T>[] slot = slots[x];
            AgedObject<T> sweepedObj = slot[i];
            if (sweepedObj != null) {
                dropout(sweepedObj.object);
                slot[i] = null;
            }
        }
    }
}
 
Example 25
Project: incubator-hivemall   File: BuildBinsUDAF.java
@Override
public void merge(@SuppressWarnings("deprecation") AggregationBuffer agg, Object other)
        throws HiveException {
    if (other == null) {
        return;
    }

    final BuildBinsAggregationBuffer myAgg = (BuildBinsAggregationBuffer) agg;

    myAgg.autoShrink = autoShrinkOI.get(structOI.getStructFieldData(other, autoShrinkField));

    final List<?> histogram = ((LazyBinaryArray) structOI.getStructFieldData(other,
        histogramField)).getList();
    myAgg.histogram.merge(histogram, histogramElOI);

    final double[] quantiles = HiveUtils.asDoubleArray(
        structOI.getStructFieldData(other, quantilesField), quantilesOI, quantileOI);
    if (quantiles != null && quantiles.length > 0) {
        myAgg.quantiles = quantiles;
    }
}
 
Example 26
Project: incubator-hivemall   File: RandomForestEnsembleUDAF.java
@Override
public void iterate(AggregationBuffer agg, Object[] parameters) throws HiveException {
    RfAggregationBufferV2 buf = (RfAggregationBufferV2) agg;

    Preconditions.checkNotNull(parameters[0]);
    int yhat = PrimitiveObjectInspectorUtils.getInt(parameters[0], yhatOI);
    Preconditions.checkNotNull(parameters[1]);
    double[] posteriori = HiveUtils.asDoubleArray(parameters[1], posterioriOI,
        posterioriElemOI);

    double weight = 1.0d;
    if (parameters.length == 3) {
        Preconditions.checkNotNull(parameters[2]);
        weight = PrimitiveObjectInspectorUtils.getDouble(parameters[2], weightOI);
    }

    buf.iterate(yhat, weight, posteriori);
}
 
Example 27
Project: incubator-hivemall   File: ArrayAvgGenericUDAF.java
void doIterate(@Nonnull final Object tuple, @Nonnull ListObjectInspector listOI,
        @Nonnull PrimitiveObjectInspector elemOI) throws HiveException {
    final int size = listOI.getListLength(tuple);
    if (_size == -1) {
        init(size);
    }
    if (size != _size) {// a corner case
        throw new HiveException("Mismatch in the number of elements at tuple: "
                + tuple.toString());
    }
    final double[] sum = _sum;
    final long[] count = _count;
    for (int i = 0, len = size; i < len; i++) {
        Object o = listOI.getListElement(tuple, i);
        if (o != null) {
            double v = PrimitiveObjectInspectorUtils.getDouble(o, elemOI);
            sum[i] += v;
            count[i] += 1L;
        }
    }
}
 
Example 28
Project: incubator-hivemall   File: XGBoostUDTF.java
@Override
public void process(@Nonnull Object[] args) throws HiveException {
    if (args[0] == null) {
        return;
    }

    // TODO: Need to support dense inputs
    final List<?> features = (List<?>) featureListOI.getList(args[0]);
    final String[] fv = new String[features.size()];
    for (int i = 0; i < features.size(); i++) {
        fv[i] = (String) featureElemOI.getPrimitiveJavaObject(features.get(i));
    }
    double target = PrimitiveObjectInspectorUtils.getDouble(args[1], this.targetOI);
    checkTargetValue(target);
    final LabeledPoint point = XGBoostUtils.parseFeatures(target, fv);
    if (point != null) {
        this.featuresList.add(point);
    }
}
 
Example 29
Project: incubator-hivemall   File: BprSamplingUDTF.java
@Override
public void close() throws HiveException {
    int feedbacks = feedback.getTotalFeedbacks();
    if (feedbacks == 0) {
        return;
    }
    int numSamples = (int) (feedbacks * samplingRate);

    if (pairSampling) {
        PerEventPositiveOnlyFeedback evFeedback = (PerEventPositiveOnlyFeedback) feedback;
        if (withoutReplacement) {
            uniformPairSamplingWithoutReplacement(evFeedback, numSamples);
        } else {
            uniformPairSamplingWithReplacement(evFeedback, numSamples);
        }
    } else {
        if (withoutReplacement) {
            uniformUserSamplingWithoutReplacement(feedback, numSamples);
        } else {
            uniformUserSamplingWithReplacement(feedback, numSamples);
        }
    }
}
 
Example 30
Project: incubator-hivemall   File: ToDenseFeaturesUDF.java
@Nullable
public List<Float> evaluate(@Nullable final List<String> features,
        @Nonnegative final int dimensions) throws HiveException {
    if (features == null) {
        return null;
    }

    final FeatureValue probe = new FeatureValue();
    final Float[] fv = new Float[dimensions + 1];
    for (String ft : features) {
        FeatureValue.parseFeatureAsString(ft, probe);
        String f = probe.getFeature();
        int i = Integer.parseInt(f);
        if (i > dimensions) {
            throw new HiveException("IndexOutOfBounds: " + i);
        }
        float v = probe.getValueAsFloat();
        fv[i] = new Float(v);
    }
    return Arrays.asList(fv);

}
 
Example 31
Project: incubator-hivemall   File: SingularSpectrumTransformTest.java
private static int detectSST(@Nonnull final ScoreFunction scoreFunc,
        @Nonnull final double threshold) throws IOException, HiveException {
    Parameters params = new Parameters();
    params.set(scoreFunc);
    PrimitiveObjectInspector oi = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    SingularSpectrumTransform sst = new SingularSpectrumTransform(params, oi);
    double[] outScores = new double[1];

    BufferedReader reader = readFile("cf1d.csv.gz");
    println("x change");
    String line;
    int numChangepoints = 0;
    while ((line = reader.readLine()) != null) {
        double x = Double.parseDouble(line);
        sst.update(x, outScores);
        printf("%f %f%n", x, outScores[0]);
        if (outScores[0] > threshold) {
            numChangepoints++;
        }
    }

    return numChangepoints;
}
 
Example 32
Project: incubator-hivemall   File: XGBoostMulticlassPredictUDTF.java
@Override
protected void forwardPredicted(@Nonnull final List<LabeledPointWithRowId> testData,
        @Nonnull final float[][] predicted) throws HiveException {
    Preconditions.checkArgument(predicted.length == testData.size(), HiveException.class);

    final Object[] forwardObj = new Object[3];
    for (int i = 0, size = testData.size(); i < size; i++) {
        final float[] predicted_i = predicted[i];
        final String rowId = testData.get(i).getRowId();
        forwardObj[0] = rowId;

        assert (predicted_i.length > 1);
        for (int j = 0; j < predicted_i.length; j++) {
            forwardObj[1] = j;
            float prob = predicted_i[j];
            forwardObj[2] = prob;
            forward(forwardObj);
        }
    }
}
 
Example 33
Project: incubator-hivemall   File: FMPredictGenericUDAF.java
@Override
public void iterate(@SuppressWarnings("deprecation") AggregationBuffer agg,
        Object[] parameters) throws HiveException {
    if (parameters[0] == null) {
        return;
    }
    FMPredictAggregationBuffer buf = (FMPredictAggregationBuffer) agg;

    double w = PrimitiveObjectInspectorUtils.getDouble(parameters[0], wOI);
    if (parameters[1] == null || /* for TD */vOI.getListLength(parameters[1]) == 0) {// Vif was null
        buf.iterate(w);
    } else {
        if (parameters[2] == null) {
            throw new UDFArgumentException("The third argument Xj must not be null");
        }
        double x = PrimitiveObjectInspectorUtils.getDouble(parameters[2], xOI);
        buf.iterate(w, x, parameters[1], vOI, vElemOI);
    }
}
 
Example 34
Project: incubator-hivemall   File: KuromojiUDFTest.java
@Test
public void testEvaluateOneRow() throws IOException, HiveException {
    KuromojiUDF udf = new KuromojiUDF();
    ObjectInspector[] argOIs = new ObjectInspector[1];
    // line
    argOIs[0] = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    udf.initialize(argOIs);

    DeferredObject[] args = new DeferredObject[1];
    args[0] = new DeferredObject() {
        public Text get() throws HiveException {
            return new Text("クロモジのJapaneseAnalyzerを使ってみる。テスト。");
        }

        @Override
        public void prepare(int arg) throws HiveException {}
    };
    List<Text> tokens = udf.evaluate(args);
    Assert.assertNotNull(tokens);
    Assert.assertEquals(5, tokens.size());
    udf.close();
}
 
Example 35
Project: incubator-hivemall   File: VectorizeFeaturesUDFTest.java
@Test
public void testTwoArguments() throws HiveException, IOException {
    VectorizeFeaturesUDF udf = new VectorizeFeaturesUDF();
    ObjectInspector[] argOIs = new ObjectInspector[3];
    List<String> featureNames = Arrays.asList("a", "b");
    argOIs[0] = ObjectInspectorFactory.getStandardConstantListObjectInspector(
        PrimitiveObjectInspectorFactory.javaStringObjectInspector, featureNames);
    argOIs[1] = PrimitiveObjectInspectorFactory.javaDoubleObjectInspector;
    argOIs[2] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
    udf.initialize(argOIs);

    DeferredObject[] arguments = new DeferredObject[3];
    arguments[1] = new DeferredJavaObject(new Double(0.1));
    arguments[2] = new DeferredJavaObject("1.1");

    List<Text> actuals = udf.evaluate(arguments);
    //System.out.println(actuals);
    List<Text> expected = WritableUtils.val("a:0.1", "b:1.1");
    Assert.assertEquals(expected, actuals);

    udf.close();
}
 
Example 36
Project: incubator-hivemall   File: PLSAPredictUDAF.java
@Override
public void iterate(@SuppressWarnings("deprecation") AggregationBuffer agg,
        Object[] parameters) throws HiveException {
    PLSAPredictAggregationBuffer myAggr = (PLSAPredictAggregationBuffer) agg;

    if (parameters[0] == null || parameters[1] == null || parameters[2] == null
            || parameters[3] == null) {
        return;
    }

    String word = PrimitiveObjectInspectorUtils.getString(parameters[0], wordOI);
    float value = PrimitiveObjectInspectorUtils.getFloat(parameters[1], valueOI);
    int label = PrimitiveObjectInspectorUtils.getInt(parameters[2], labelOI);
    float prob = PrimitiveObjectInspectorUtils.getFloat(parameters[3], probOI);

    myAggr.iterate(word, value, label, prob);
}
 
Example 37
Project: incubator-hivemall   File: LogisticRegressionDataGeneratorUDTF.java
private void flushBuffered(int position) throws HiveException {
    final Object[] forwardObjs = new Object[2];
    if (dense) {
        for (int i = 0; i < position; i++) {
            forwardObjs[0] = Float.valueOf(labels[i]);
            forwardObjs[1] = Arrays.asList(featuresFloatArray[i]);
            forward(forwardObjs);
        }
    } else {
        for (int i = 0; i < position; i++) {
            forwardObjs[0] = Float.valueOf(labels[i]);
            forwardObjs[1] = Arrays.asList(featuresArray[i]);
            forward(forwardObjs);
        }
    }
}
 
Example 38
Project: incubator-hivemall   File: RandomForestClassifierUDTF.java
@Override
public void close() throws HiveException {
    this._progressReporter = getReporter();
    this._treeBuildTaskCounter = (_progressReporter == null) ? null
            : _progressReporter.getCounter("hivemall.smile.RandomForestClassifier$Counter",
                "finishedTreeBuildTasks");
    reportProgress(_progressReporter);

    if (!labels.isEmpty()) {
        Matrix x = matrixBuilder.buildMatrix();
        this.matrixBuilder = null;
        int[] y = labels.toArray();
        this.labels = null;

        // run training
        train(x, y);
    }

    // clean up
    this.featureListOI = null;
    this.featureElemOI = null;
    this.labelOI = null;
    this._attributes = null;
}
 
Example 39
Project: incubator-hivemall   File: PopulateNotInUDTF.java
@Override
public void process(Object[] args) throws HiveException {
    Object arg0 = args[0];
    if (arg0 == null || listOI.getListLength(arg0) == 0) {
        populateAll();
        return;
    }

    final BitSet bits;
    if (bitsetInput) {
        long[] longs = HiveUtils.asLongArray(arg0, listOI, listElemOI);
        bits = BitSet.valueOf(longs);
    } else {
        if (_bitset == null) {
            bits = new BitSet();
            this._bitset = bits;
        } else {
            bits = _bitset;
            bits.clear();
        }
        HiveUtils.setBits(arg0, listOI, listElemOI, bits);
    }

    populateItems(bits);
}
 
Example 40
Project: incubator-hivemall   File: Lat2TileYUDF.java
@Override
public IntWritable evaluate(DeferredObject[] arguments) throws HiveException {
    Object arg0 = arguments[0].get();
    Object arg1 = arguments[1].get();

    if (arg0 == null) {
        return null;
    }
    if (arg1 == null) {
        throw new UDFArgumentException("zoom level should not be null");
    }

    double lat = PrimitiveObjectInspectorUtils.getDouble(arg0, latOI);
    int zoom = PrimitiveObjectInspectorUtils.getInt(arg1, zoomOI);
    Preconditions.checkArgument(zoom >= 0, "Invalid zoom level", UDFArgumentException.class);

    final int y;
    try {
        y = GeoSpatialUtils.lat2tiley(lat, zoom);
    } catch (IllegalArgumentException ex) {
        throw new UDFArgumentException(ex);
    }

    result.set(y);
    return result;
}