Java Code Examples for org.apache.hadoop.hive.common.type.HiveDecimal

The following examples show how to use org.apache.hadoop.hive.common.type.HiveDecimal. They are extracted from open source projects; the originating project, source file, and license are noted above each example where available.
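Most of the examples below rely on the same small set of HiveDecimal factory and accessor methods. The following is a minimal, self-contained sketch of that core API (class name and values are chosen for illustration only; exact rounding and trailing-zero normalization can differ between Hive versions):

import java.math.BigDecimal;
import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;

public class HiveDecimalBasics {
    public static void main(String[] args) {
        // Factory methods: from a String, a BigDecimal, or an unscaled BigInteger plus a scale.
        HiveDecimal fromString = HiveDecimal.create("123.45");
        HiveDecimal fromBigDecimal = HiveDecimal.create(new BigDecimal("123.45"));
        HiveDecimal fromUnscaled = HiveDecimal.create(BigInteger.valueOf(12345), 2); // also 123.45

        // Accessors used throughout the examples below.
        System.out.println(fromString.precision());       // 5
        System.out.println(fromString.scale());           // 2
        System.out.println(fromString.unscaledValue());   // 12345
        System.out.println(fromString.bigDecimalValue()); // 123.45

        // setScale returns a new instance; HiveDecimal values are immutable.
        HiveDecimal rescaled = fromBigDecimal.setScale(3);
        System.out.println(rescaled.compareTo(fromUnscaled)); // 0 (numerically equal)
    }
}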
Example 1
Source Project: presto   Source File: GenericHiveRecordCursor.java    License: Apache License 2.0
private void parseDecimalColumn(int column)
{
    loaded[column] = true;

    Object fieldData = rowInspector.getStructFieldData(rowData, structFields[column]);

    if (fieldData == null) {
        nulls[column] = true;
    }
    else {
        Object fieldValue = ((PrimitiveObjectInspector) fieldInspectors[column]).getPrimitiveJavaObject(fieldData);
        checkState(fieldValue != null, "fieldValue should not be null");

        HiveDecimal decimal = (HiveDecimal) fieldValue;
        DecimalType columnType = (DecimalType) types[column];
        BigInteger unscaledDecimal = rescale(decimal.unscaledValue(), decimal.scale(), columnType.getScale());

        if (columnType.isShort()) {
            longs[column] = unscaledDecimal.longValue();
        }
        else {
            slices[column] = Decimals.encodeUnscaledValue(unscaledDecimal);
        }
        nulls[column] = false;
    }
}
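The rescale call above is a Presto helper, not part of HiveDecimal. Conceptually it converts the unscaled value from the decimal's own scale to the target column's scale by multiplying or dividing by a power of ten. A hypothetical, simplified version is sketched below; Presto's real helper additionally applies rounding and overflow checks when the scale is reduced.

import java.math.BigInteger;

// Hypothetical sketch of what rescaling an unscaled decimal value means;
// not Presto's actual helper, which also rounds and checks for overflow.
static BigInteger rescaleSketch(BigInteger unscaled, int fromScale, int toScale) {
    if (toScale == fromScale) {
        return unscaled;
    }
    if (toScale > fromScale) {
        // Increasing the scale multiplies the unscaled value by 10^(toScale - fromScale).
        return unscaled.multiply(BigInteger.TEN.pow(toScale - fromScale));
    }
    // Decreasing the scale divides; this sketch truncates instead of rounding.
    return unscaled.divide(BigInteger.TEN.pow(fromScale - toScale));
}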
 
Example 2
Source Project: presto   Source File: TestDataWritableWriter.java    License: Apache License 2.0
private Binary decimalToBinary(HiveDecimal hiveDecimal, DecimalTypeInfo decimalTypeInfo)
{
    int prec = decimalTypeInfo.precision();
    int scale = decimalTypeInfo.scale();
    byte[] decimalBytes = hiveDecimal.setScale(scale).unscaledValue().toByteArray();

    // Estimated number of bytes needed.
    int precToBytes = ParquetHiveSerDe.PRECISION_TO_BYTE_COUNT[prec - 1];
    if (precToBytes == decimalBytes.length) {
        // No padding needed.
        return Binary.fromByteArray(decimalBytes);
    }

    byte[] tgt = new byte[precToBytes];
    if (hiveDecimal.signum() == -1) {
        // For a negative number, initialize all bits to 1 so the padding is sign-extended.
        for (int i = 0; i < precToBytes; i++) {
            tgt[i] |= 0xFF;
        }
    }

    System.arraycopy(decimalBytes, 0, tgt, precToBytes - decimalBytes.length, decimalBytes.length); // Padding leading zeroes/ones.
    return Binary.fromByteArray(tgt);
}
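Reading such a fixed-length value back is the mirror image: the padded bytes are already a big-endian two's-complement encoding of the unscaled value, so BigInteger plus the column scale are enough. A short sketch of the inverse conversion (the scale is assumed to come from the same DecimalTypeInfo):

import java.math.BigInteger;

import org.apache.hadoop.hive.common.type.HiveDecimal;

// Sketch of the inverse of decimalToBinary above.
static HiveDecimal binaryToDecimal(byte[] fixedLenBytes, int scale) {
    BigInteger unscaled = new BigInteger(fixedLenBytes); // big-endian two's complement
    return HiveDecimal.create(unscaled, scale);
}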
 
Example 3
Source Project: presto   Source File: TestParquetDecimalScaling.java    License: Apache License 2.0
private Object convertValue(String value)
{
    BigDecimal bigValue = new BigDecimal(value).setScale(scale, UNNECESSARY);

    if (!forceFixedLengthArray && precision > 0 && precision < 10) {
        return bigValue.unscaledValue().intValue();
    }

    if (!forceFixedLengthArray && precision >= 10 && precision < 18) {
        return bigValue.unscaledValue().longValue();
    }

    if (precision > 38 || precision < 0) {
        throw new IllegalArgumentException("Precision cannot be greater than 38 or less than 0");
    }

    return HiveDecimal.create(bigValue);
}
 
Example 4
Source Project: localization_nifi   Source File: OrcFlowFileWriter.java    License: Apache License 2.0
@Override
void write(Object obj) throws IOException {
    super.write(obj);
    if (obj != null) {
        HiveDecimal decimal = ((HiveDecimalObjectInspector) inspector).getPrimitiveJavaObject(obj);
        if (decimal == null) {
            return;
        }
        SerializationUtils.writeBigInteger(valueStream,
                decimal.unscaledValue());
        scaleStream.write(decimal.scale());
        indexStatistics.updateDecimal(decimal);
        if (createBloomFilter) {
            bloomFilter.addString(decimal.toString());
        }
    }
}
 
Example 5
Source Project: pxf   Source File: HiveORCSearchArgumentBuilder.java    License: Apache License 2.0
private static Object boxLiteral(Object literal) {
    if (literal instanceof String ||
            literal instanceof Long ||
            literal instanceof Double ||
            literal instanceof Date ||
            literal instanceof Timestamp ||
            literal instanceof HiveDecimal ||
            literal instanceof BigDecimal ||
            literal instanceof Boolean) {
        return literal;
    } else if (literal instanceof HiveChar ||
            literal instanceof HiveVarchar) {
        return StringUtils.stripEnd(literal.toString(), null);
    } else if (literal instanceof Byte ||
            literal instanceof Short ||
            literal instanceof Integer) {
        return ((Number) literal).longValue();
    } else if (literal instanceof Float) {
        // to avoid change in precision when upcasting float to double
        // we convert the literal to string and parse it as double. (HIVE-8460)
        return Double.parseDouble(literal.toString());
    } else {
        throw new IllegalArgumentException("Unknown type for literal " +
                literal);
    }
}
 
Example 6
private Object convertDecimalTypes(Object val, String javaColType) {
  HiveDecimal hd = (HiveDecimal) val;
  BigDecimal bd = hd.bigDecimalValue();

  if (javaColType.equals(BIG_DECIMAL_TYPE)) {
    return bd;
  } else if (javaColType.equals(STRING_TYPE)) {
    String bdStr = null;
    if (bigDecimalFormatString) {
      bdStr = bd.toPlainString();
    } else {
      bdStr = bd.toString();
    }
    return bdStr;
  }
  return null;
}
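The bigDecimalFormatString flag matters because BigDecimal.toString() may fall back to scientific notation, while toPlainString() never does. A small, standalone illustration of the difference (values chosen arbitrarily):

import java.math.BigDecimal;

public class PlainStringDemo {
    public static void main(String[] args) {
        BigDecimal bd = new BigDecimal("1E+7"); // unscaled value 1, scale -7
        System.out.println(bd.toString());      // prints 1E+7
        System.out.println(bd.toPlainString()); // prints 10000000
    }
}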
 
Example 7
public void testNumberTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, 0, 0,
      "1000", new BigDecimal("1000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, 0, 0,
      "2000", new BigDecimal("2000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.DECIMAL, 18, 2,
        HiveDecimal.create(new BigDecimal("2000")),
        new BigDecimal("2000"), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  setExtraArgs(addlArgsArray);
  runHCatImport(addlArgsArray, TOTAL_RECORDS, table, cols, null);
}
 
Example 8
public void testNumberTypes() throws Exception {
  final int TOTAL_RECORDS = 1 * 10;
  String table = getTableName().toUpperCase();
  ColumnGenerator[] cols = new ColumnGenerator[] {
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0),
      "numeric(18,2)", Types.NUMERIC, HCatFieldSchema.Type.STRING, 0, 0,
      "1000", new BigDecimal("1000"), KeyType.NOT_A_KEY),
    HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1),
      "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.STRING, 0, 0,
      "2000", new BigDecimal("2000"), KeyType.NOT_A_KEY),
      HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(2),
        "decimal(18,2)", Types.DECIMAL, HCatFieldSchema.Type.DECIMAL, 18, 2,
        HiveDecimal.create(new BigDecimal("2000")),
        new BigDecimal("2000"), KeyType.NOT_A_KEY),
  };
  List<String> addlArgsArray = new ArrayList<String>();
  runHCatExport(addlArgsArray, TOTAL_RECORDS, table, cols);
}
 
Example 9
Source Project: hadoop-etl-udfs   Source File: HdfsSerDeImportService.java    License: MIT License
private static Object getJavaObjectFromPrimitiveData(Object data, ObjectInspector objInsp) {
    assert(objInsp.getCategory() == Category.PRIMITIVE);
    if (data == null) {
        return null;
    }
    if (data instanceof BytesWritable && objInsp instanceof WritableHiveDecimalObjectInspector) {
        // BytesWritable cannot be directly cast to HiveDecimalWritable
        WritableHiveDecimalObjectInspector oi = (WritableHiveDecimalObjectInspector) objInsp;
        data = oi.create(((BytesWritable) data).getBytes(), oi.scale());
    }
    Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
    if (obj instanceof HiveDecimal) {
        obj = ((HiveDecimal) obj).bigDecimalValue();
    } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
        obj = obj.toString();
    } else if (obj instanceof byte[]) {
        obj = Hex.encodeHexString((byte[]) obj);
    }
    return obj;
}
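The object inspector above hides the Writable wrapper that Hive actually passes around. When the value already arrives as a HiveDecimalWritable, the round trip to a plain BigDecimal is direct; a minimal sketch, assuming Hive's org.apache.hadoop.hive.serde2.io.HiveDecimalWritable:

import java.math.BigDecimal;

import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;

// Sketch of the Writable <-> HiveDecimal round trip performed behind the object inspector.
static BigDecimal writableRoundTrip() {
    HiveDecimalWritable writable = new HiveDecimalWritable(HiveDecimal.create("42.50"));
    HiveDecimal decimal = writable.getHiveDecimal(); // unwrap back to HiveDecimal
    return decimal.bigDecimalValue();
}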
 
Example 10
Source Project: hadoop-etl-udfs   Source File: HdfsSerDeImportService.java    License: MIT License
private static Object getJavaObjectFromFieldData(Object data, ObjectInspector objInsp) {
    if (data == null) {
        return null;
    }
    if (objInsp.getCategory() == Category.PRIMITIVE) {
        Object obj = ObjectInspectorUtils.copyToStandardJavaObject(data, objInsp);
        if (obj instanceof HiveDecimal) {
            obj = ((HiveDecimal) obj).bigDecimalValue();
        } else if (obj instanceof HiveVarchar || obj instanceof HiveChar) {
            obj = obj.toString();
        } else if (obj instanceof byte[]) {
            obj = Hex.encodeHexString((byte[]) obj);
        }
        return obj;
    } else if (objInsp.getCategory() == Category.LIST) {
        return getJsonArrayFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
    } else {
        return getJsonObjectFromFieldData(data, objInsp, Json.createBuilderFactory(null)).build().toString();
    }
}
 
Example 11
/**
 * Read list of Bitcoin transaction outputs from a table in Hive in any format (e.g. ORC, Parquet)
 *
 * @param loi ObjectInspector for processing the Object containing a list
 * @param listOfOutputsObject object containing the list of outputs to a Bitcoin Transaction
 *
 * @return a list of BitcoinTransactionOutputs
 */
private List<BitcoinTransactionOutput> readListOfOutputsFromTable(ListObjectInspector loi, Object listOfOutputsObject) {
    int listLength = loi.getListLength(listOfOutputsObject);
    List<BitcoinTransactionOutput> result = new ArrayList<>(listLength);
    StructObjectInspector listOfOutputsElementObjectInspector = (StructObjectInspector) loi.getListElementObjectInspector();
    for (int i = 0; i < listLength; i++) {
        Object currentListOfOutputsObject = loi.getListElement(listOfOutputsObject, i);
        StructField valueSF = listOfOutputsElementObjectInspector.getStructFieldRef("value");
        StructField txoutscriptlengthSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscriptlength");
        StructField txoutscriptSF = listOfOutputsElementObjectInspector.getStructFieldRef("txoutscript");
        if ((valueSF == null) || (txoutscriptlengthSF == null) || (txoutscriptSF == null)) {
            LOG.warn("Invalid BitcoinTransactionOutput detected at position " + i);
            return new ArrayList<>();
        }
        HiveDecimal currentValue = hdoi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, valueSF));
        byte[] currentTxOutScriptLength = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptlengthSF));
        byte[] currentTxOutScript = wboi.getPrimitiveJavaObject(listOfOutputsElementObjectInspector.getStructFieldData(currentListOfOutputsObject, txoutscriptSF));
        BitcoinTransactionOutput currentBitcoinTransactionOutput = new BitcoinTransactionOutput(currentValue.bigDecimalValue().toBigIntegerExact(), currentTxOutScriptLength, currentTxOutScript);
        result.add(currentBitcoinTransactionOutput);
    }
    return result;
}
 
Example 12
Source Project: hadoopcryptoledger   Source File: EthereumUDFTest.java    License: Apache License 2.0
/**
 * Helper function to convert an EthereumTransaction to a HiveEthereumTransaction
 *
 * @param transaction the EthereumTransaction to convert
 * @return the corresponding HiveEthereumTransaction
 */
private static HiveEthereumTransaction convertToHiveEthereumTransaction(EthereumTransaction transaction) {
    HiveEthereumTransaction newTransaction = new HiveEthereumTransaction();
    newTransaction.setNonce(transaction.getNonce());
    newTransaction.setValue(HiveDecimal.create(transaction.getValue()));
    newTransaction.setValueRaw(transaction.getValueRaw());
    newTransaction.setReceiveAddress(transaction.getReceiveAddress());
    newTransaction.setGasPrice(HiveDecimal.create(transaction.getGasPrice()));
    newTransaction.setGasPriceRaw(transaction.getGasPriceRaw());
    newTransaction.setGasLimit(HiveDecimal.create(transaction.getGasLimit()));
    newTransaction.setGasLimitRaw(transaction.getGasLimitRaw());
    newTransaction.setData(transaction.getData());
    newTransaction.setSig_v(transaction.getSig_v());
    newTransaction.setSig_r(transaction.getSig_r());
    newTransaction.setSig_s(transaction.getSig_s());
    return newTransaction;
}
 
Example 13
Source Project: osm2orc   Source File: OrcWriter.java    License: ISC License
@Override
public void process(WayContainer container) {
    DecimalColumnVector lat = (DecimalColumnVector) batch.cols[3];
    DecimalColumnVector lon = (DecimalColumnVector) batch.cols[4];
    ListColumnVector nds = (ListColumnVector) batch.cols[5];

    checkLimit();
    addCommonProperties(container);

    lat.isNull[row] = true;
    lon.isNull[row] = true;
    lat.set(row, (HiveDecimal) null);
    lon.set(row, (HiveDecimal) null);

    Way way = container.getEntity();

    nds.lengths[row] = way.getWayNodes().size();
    nds.childCount += nds.lengths[row];
    nds.child.ensureSize(nds.childCount, nds.offsets[row] != 0);

    for (int j = 0; j < way.getWayNodes().size(); j++) {
        StructColumnVector ndsStruct = (StructColumnVector) nds.child;

        ((LongColumnVector) ndsStruct.fields[0]).vector[(int) nds.offsets[row] + j] = way.getWayNodes().get(j).getNodeId();
    }
}
 
Example 14
Source Project: nifi   Source File: TestNiFiRecordSerDe.java    License: Apache License 2.0
@Test
public void testSimpleMap() throws SerDeException{
    testSimpleMap("string", "tinyint", RecordFieldType.BYTE.getDataType(), createMap((byte)89, (byte)2), objectMap(createMap((byte)89, (byte)2)));
    testSimpleMap("string", "smallint", RecordFieldType.SHORT.getDataType(), createMap((short)89, (short)209), objectMap(createMap((short)89, (short)209)));
    testSimpleMap("string", "int", RecordFieldType.INT.getDataType(), createMap(90, 87), objectMap(createMap(90, 87)));
    testSimpleMap("string", "bigint", RecordFieldType.BIGINT.getDataType(), createMap(87888L, 876L, 123L), objectMap(createMap(87888L, 876L, 123L)));
    testSimpleMap("string", "boolean", RecordFieldType.BOOLEAN.getDataType(), createMap(false, true, true, false), objectMap(createMap(false, true, true, false)));
    testSimpleMap("string", "float", RecordFieldType.FLOAT.getDataType(), createMap(1.2f, 8.6f, 0.125f), objectMap(createMap(1.2f, 8.6f, 0.125f)));
    testSimpleMap("string", "double", RecordFieldType.DOUBLE.getDataType(), createMap(3.142, 8.93), objectMap(createMap(3.142, 8.93)));
    testSimpleMap("string", "string", RecordFieldType.STRING.getDataType(), createMap("form", "ni", "aje"), objectMap(createMap("form", "ni", "aje")));
    testSimpleMap("string", "varchar(20)", RecordFieldType.STRING.getDataType(), createMap("niko", "kiza"), objectMap(createMap("niko", "kiza")));
    testSimpleMap("string", "char(1)", RecordFieldType.CHAR.getDataType(), createMap('a', 'b', 'c'), objectMap(createMap("a", "b", "c")));
    long now = System.currentTimeMillis();
    Date hiveDate = new Date();
    hiveDate.setTimeInMillis(now);
    Timestamp hiveTs = new Timestamp();
    hiveTs.setTimeInMillis(now);

    testSimpleMap("string", "date", RecordFieldType.DATE.getDataType(), createMap(new java.sql.Date(now)), objectMap(createMap(hiveDate)));
    testSimpleMap("string", "timestamp", RecordFieldType.TIMESTAMP.getDataType(), createMap(new java.sql.Timestamp(now)), objectMap(createMap(hiveTs)));
    testSimpleMap("string", "decimal(10,2)", RecordFieldType.DOUBLE.getDataType(), createMap(45.6, 2345.5), objectMap(createMap(
            HiveDecimal.create(45.6), HiveDecimal.create(2345.5)
    )));
}
 
Example 15
Source Project: presto   Source File: HiveWriteUtils.java    License: Apache License 2.0
private static HiveDecimal getHiveDecimal(DecimalType decimalType, Block block, int position)
{
    BigInteger unscaledValue;
    if (decimalType.isShort()) {
        unscaledValue = BigInteger.valueOf(decimalType.getLong(block, position));
    }
    else {
        unscaledValue = Decimals.decodeUnscaledValue(decimalType.getSlice(block, position));
    }
    return HiveDecimal.create(unscaledValue, decimalType.getScale());
}
 
Example 16
Source Project: presto   Source File: AbstractTestParquetReader.java    License: Apache License 2.0
@Test
public void testParquetLongDecimalWriteToPrestoDecimalWithNonMatchingScale()
        throws Exception
{
    tester.testRoundTrip(
            new JavaHiveDecimalObjectInspector(new DecimalTypeInfo(38, 10)),
            ImmutableList.of(HiveDecimal.create(100 * longTenToNth(10), 10)),
            ImmutableList.of(new SqlDecimal(BigInteger.valueOf(100 * longTenToNth(9)), 38, 9)),
            createDecimalType(38, 9));
}
 
Example 17
Source Project: flink   Source File: HiveInspectors.java    License: Apache License 2.0
private static ConstantObjectInspector getPrimitiveJavaConstantObjectInspector(PrimitiveTypeInfo typeInfo, Object value) {
	switch (typeInfo.getPrimitiveCategory()) {
		case BOOLEAN:
			return new JavaConstantBooleanObjectInspector((Boolean) value);
		case BYTE:
			return new JavaConstantByteObjectInspector((Byte) value);
		case SHORT:
			return new JavaConstantShortObjectInspector((Short) value);
		case INT:
			return new JavaConstantIntObjectInspector((Integer) value);
		case LONG:
			return new JavaConstantLongObjectInspector((Long) value);
		case FLOAT:
			return new JavaConstantFloatObjectInspector((Float) value);
		case DOUBLE:
			return new JavaConstantDoubleObjectInspector((Double) value);
		case STRING:
			return new JavaConstantStringObjectInspector((String) value);
		case CHAR:
			return new JavaConstantHiveCharObjectInspector((HiveChar) value);
		case VARCHAR:
			return new JavaConstantHiveVarcharObjectInspector((HiveVarchar) value);
		case DATE:
			return new JavaConstantDateObjectInspector((Date) value);
		case TIMESTAMP:
			return new JavaConstantTimestampObjectInspector((Timestamp) value);
		case DECIMAL:
			return new JavaConstantHiveDecimalObjectInspector((HiveDecimal) value);
		case BINARY:
			return new JavaConstantBinaryObjectInspector((byte[]) value);
		case UNKNOWN:
		case VOID:
			// If type is null, we use the Java Constant String to replace
			return new JavaConstantStringObjectInspector((String) value);
		default:
			throw new FlinkHiveUDFException(
				String.format("Cannot find ConstantObjectInspector for %s", typeInfo));
	}
}
 
Example 18
@Override
public Object convert(Object input) {
  if (input == null) {
    return null;
  }
  Object r = outputOI.create(HiveDecimal.ZERO);
  return outputOI.set(r, PrimitiveObjectInspectorUtils.getHiveDecimal(input, inputOI));
}
 
Example 19
Source Project: pxf   Source File: HiveORCSearchArgumentBuilder.java    License: Apache License 2.0
/**
 * Get the type of the given expression node.
 *
 * @param literal the object
 * @return the matching PredicateLeaf.Type; an IllegalArgumentException is thrown if the type is unknown
 */
private PredicateLeaf.Type getType(Object literal) {
    if (literal instanceof Byte ||
            literal instanceof Short ||
            literal instanceof Integer ||
            literal instanceof Long) {
        return PredicateLeaf.Type.LONG;
    } else if (literal instanceof HiveChar ||
            literal instanceof HiveVarchar ||
            literal instanceof String) {
        return PredicateLeaf.Type.STRING;
    } else if (literal instanceof Float ||
            literal instanceof Double) {
        return PredicateLeaf.Type.FLOAT;
    } else if (literal instanceof Date) {
        return PredicateLeaf.Type.DATE;
    } else if (literal instanceof Timestamp) {
        return PredicateLeaf.Type.TIMESTAMP;
    } else if (literal instanceof HiveDecimal ||
            literal instanceof BigDecimal) {
        return PredicateLeaf.Type.DECIMAL;
    } else if (literal instanceof Boolean) {
        return PredicateLeaf.Type.BOOLEAN;
    } else if (literal instanceof List) {
        @SuppressWarnings("unchecked")
        List<Object> l = (List<Object>) literal;
        if (l.size() > 0)
            return getType(l.get(0));
    }
    throw new IllegalArgumentException(String.format("Unknown type for literal %s", literal));
}
 
Example 20
private Object convertStringTypes(Object val, HCatFieldSchema hfs) {
  HCatFieldSchema.Type hfsType = hfs.getType();
  if (hfsType == HCatFieldSchema.Type.STRING
      || hfsType == HCatFieldSchema.Type.VARCHAR
      || hfsType == HCatFieldSchema.Type.CHAR) {
    String str = val.toString();
    if (doHiveDelimsReplacement) {
      str = FieldFormatter.hiveStringReplaceDelims(str,
        hiveDelimsReplacement, hiveDelimiters);
    }
    if (hfsType == HCatFieldSchema.Type.STRING) {
      return str;
    } else if (hfsType == HCatFieldSchema.Type.VARCHAR) {
      VarcharTypeInfo vti = (VarcharTypeInfo) hfs.getTypeInfo();
      HiveVarchar hvc = new HiveVarchar(str, vti.getLength());
      return hvc;
    } else if (hfsType == HCatFieldSchema.Type.CHAR) {
      CharTypeInfo cti = (CharTypeInfo) hfs.getTypeInfo();
      HiveChar hc = new HiveChar(val.toString(), cti.getLength());
      return hc;
    }
  } else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
    BigDecimal bd = new BigDecimal(val.toString(), MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    return hd;
  }
  return null;
}
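If the parsed value may exceed the declared precision and scale of the target column, it can be bounded before writing. A hedged sketch using HiveDecimal.enforcePrecisionScale, assuming the static (decimal, maxPrecision, maxScale) overload that returns null when the value cannot be represented within the limits:

import java.math.BigDecimal;
import java.math.MathContext;

import org.apache.hadoop.hive.common.type.HiveDecimal;

// Hedged sketch: bound a parsed value to a decimal(18,2) column before writing it.
// enforcePrecisionScale is assumed to return null when the value does not fit.
static HiveDecimal toDecimal18x2(String raw) {
    BigDecimal bd = new BigDecimal(raw, MathContext.DECIMAL128);
    HiveDecimal hd = HiveDecimal.create(bd);
    HiveDecimal bounded = HiveDecimal.enforcePrecisionScale(hd, 18, 2);
    if (bounded == null) {
        throw new IllegalArgumentException("Value does not fit decimal(18,2): " + raw);
    }
    return bounded;
}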
 
Example 21
Source Project: dremio-oss   Source File: ObjectInspectors.java    License: Apache License 2.0
@Override
public HiveDecimal getPrimitiveJavaObject(Object o){
<#if mode == "Optional">
  if (o == null) {
    return null;
  }
  final NullableDecimalHolder h = (NullableDecimalHolder) o;
<#else>
  final DecimalHolder h = (DecimalHolder) o;
</#if>
  h.start = (h.start / org.apache.arrow.vector.util.DecimalUtility.DECIMAL_BYTE_LENGTH);
  return HiveDecimal.create(DecimalUtility.getBigDecimalFromArrowBuf(h.buffer, h.start, h.scale));
}
 
Example 22
Source Project: hadoopcryptoledger   Source File: BitcoinUDFTest.java    License: Apache License 2.0
@Test
  public void BitcoinTransactionHashUDFWriteable()  throws HiveException  {
	BitcoinTransactionHashUDF bthUDF = new BitcoinTransactionHashUDF();
	ObjectInspector[] arguments = new ObjectInspector[1];
	arguments[0] =  ObjectInspectorFactory.getReflectionObjectInspector(HiveBitcoinTransaction.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
	bthUDF.initialize(arguments);	
// create BitcoinTransaction
 // reconstruct the transaction from the genesis block
	int version=1;
	byte[] inCounter = new byte[]{0x01};
	byte[] previousTransactionHash = new byte[]{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
	long previousTxOutIndex = 4294967295L;
	byte[] txInScriptLength = new byte[]{(byte)0x4D};
	byte[] txInScript= new byte[]{(byte)0x04,(byte)0xFF,(byte)0xFF,(byte)0x00,(byte)0x1D,(byte)0x01,(byte)0x04,(byte)0x45,(byte)0x54,(byte)0x68,(byte)0x65,(byte)0x20,(byte)0x54,(byte)0x69,(byte)0x6D,(byte)0x65,(byte)0x73,(byte)0x20,(byte)0x30,(byte)0x33,(byte)0x2F,(byte)0x4A,(byte)0x61,(byte)0x6E,(byte)0x2F,(byte)0x32,(byte)0x30,(byte)0x30,(byte)0x39,(byte)0x20,(byte)0x43,(byte)0x68,(byte)0x61,(byte)0x6E,(byte)0x63,(byte)0x65,(byte)0x6C,(byte)0x6C,(byte)0x6F,(byte)0x72,(byte)0x20,(byte)0x6F,(byte)0x6E,(byte)0x20,(byte)0x62,(byte)0x72,(byte)0x69,(byte)0x6E,(byte)0x6B,(byte)0x20,(byte)0x6F,(byte)0x66,(byte)0x20,(byte)0x73,(byte)0x65,(byte)0x63,(byte)0x6F,(byte)0x6E,(byte)0x64,(byte)0x20,(byte)0x62,(byte)0x61,(byte)0x69,(byte)0x6C,(byte)0x6F,(byte)0x75,(byte)0x74,(byte)0x20,(byte)0x66,(byte)0x6F,(byte)0x72,(byte)0x20,(byte)0x62,(byte)0x61,(byte)0x6E,(byte)0x6B,(byte)0x73};
	long seqNo=4294967295L;
	byte[] outCounter = new byte[]{0x01};
	long value=5000000000L;
	byte[] txOutScriptLength=new byte[]{(byte)0x43};
	byte[] txOutScript=new byte[]{(byte)0x41,(byte)0x04,(byte)0x67,(byte)0x8A,(byte)0xFD,(byte)0xB0,(byte)0xFE,(byte)0x55,(byte)0x48,(byte)0x27,(byte)0x19,(byte)0x67,(byte)0xF1,(byte)0xA6,(byte)0x71,(byte)0x30,(byte)0xB7,(byte)0x10,(byte)0x5C,(byte)0xD6,(byte)0xA8,(byte)0x28,(byte)0xE0,(byte)0x39,(byte)0x09,(byte)0xA6,(byte)0x79,(byte)0x62,(byte)0xE0,(byte)0xEA,(byte)0x1F,(byte)0x61,(byte)0xDE,(byte)0xB6,(byte)0x49,(byte)0xF6,(byte)0xBC,(byte)0x3F,(byte)0x4C,(byte)0xEF,(byte)0x38,(byte)0xC4,(byte)0xF3,(byte)0x55,(byte)0x04,(byte)0xE5,(byte)0x1E,(byte)0xC1,(byte)0x12,(byte)0xDE,(byte)0x5C,(byte)0x38,(byte)0x4D,(byte)0xF7,(byte)0xBA,(byte)0x0B,(byte)0x8D,(byte)0x57,(byte)0x8A,(byte)0x4C,(byte)0x70,(byte)0x2B,(byte)0x6B,(byte)0xF1,(byte)0x1D,(byte)0x5F,(byte)0xAC};
	int lockTime = 0;
	List<BitcoinTransactionInput> genesisInput = new ArrayList<BitcoinTransactionInput>(1);
	genesisInput.add(new BitcoinTransactionInput(previousTransactionHash,previousTxOutIndex,txInScriptLength,txInScript,seqNo));
	List<HiveBitcoinTransactionOutput> genesisOutput = new ArrayList<HiveBitcoinTransactionOutput>(1);
	genesisOutput.add(new HiveBitcoinTransactionOutput(HiveDecimal.create(BigInteger.valueOf(value)),txOutScriptLength,txOutScript));
	 HiveBitcoinTransaction genesisTransaction = new HiveBitcoinTransaction(version,inCounter,genesisInput,outCounter,genesisOutput,lockTime);
	 byte[] expectedHash = BitcoinUtil.reverseByteArray(new byte[]{(byte)0x4A,(byte)0x5E,(byte)0x1E,(byte)0x4B,(byte)0xAA,(byte)0xB8,(byte)0x9F,(byte)0x3A,(byte)0x32,(byte)0x51,(byte)0x8A,(byte)0x88,(byte)0xC3,(byte)0x1B,(byte)0xC8,(byte)0x7F,(byte)0x61,(byte)0x8F,(byte)0x76,(byte)0x67,(byte)0x3E,(byte)0x2C,(byte)0xC7,(byte)0x7A,(byte)0xB2,(byte)0x12,(byte)0x7B,(byte)0x7A,(byte)0xFD,(byte)0xED,(byte)0xA3,(byte)0x3B});
	GenericUDF.DeferredObject[] doa = new GenericUDF.DeferredObject[1];
	doa[0]=new GenericUDF.DeferredJavaObject(genesisTransaction);
	BytesWritable bw = (BytesWritable) bthUDF.evaluate(doa);
	
	assertArrayEquals( expectedHash,bw.copyBytes(),"BitcoinTransaction object genesis transaction hash from UDF");
  }
 
Example 23
Source Project: hadoopcryptoledger   Source File: BitcoinUDFTest.java    License: Apache License 2.0
@Test
  public void BitcoinTransactionHashUDFObjectInspector() throws HiveException {
	BitcoinTransactionHashUDF bthUDF = new BitcoinTransactionHashUDF();
	ObjectInspector[] arguments = new ObjectInspector[1];
	arguments[0] =  ObjectInspectorFactory.getReflectionObjectInspector(TestBitcoinTransaction.class,ObjectInspectorFactory.ObjectInspectorOptions.JAVA);
	bthUDF.initialize(arguments);	
// create BitcoinTransaction
 // reconstruct the transaction from the genesis block
	int version=1;
	byte[] inCounter = new byte[]{0x01};
	byte[] previousTransactionHash = new byte[]{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00};
	long previousTxOutIndex = 4294967295L;
	byte[] txInScriptLength = new byte[]{(byte)0x4D};
	byte[] txInScript= new byte[]{(byte)0x04,(byte)0xFF,(byte)0xFF,(byte)0x00,(byte)0x1D,(byte)0x01,(byte)0x04,(byte)0x45,(byte)0x54,(byte)0x68,(byte)0x65,(byte)0x20,(byte)0x54,(byte)0x69,(byte)0x6D,(byte)0x65,(byte)0x73,(byte)0x20,(byte)0x30,(byte)0x33,(byte)0x2F,(byte)0x4A,(byte)0x61,(byte)0x6E,(byte)0x2F,(byte)0x32,(byte)0x30,(byte)0x30,(byte)0x39,(byte)0x20,(byte)0x43,(byte)0x68,(byte)0x61,(byte)0x6E,(byte)0x63,(byte)0x65,(byte)0x6C,(byte)0x6C,(byte)0x6F,(byte)0x72,(byte)0x20,(byte)0x6F,(byte)0x6E,(byte)0x20,(byte)0x62,(byte)0x72,(byte)0x69,(byte)0x6E,(byte)0x6B,(byte)0x20,(byte)0x6F,(byte)0x66,(byte)0x20,(byte)0x73,(byte)0x65,(byte)0x63,(byte)0x6F,(byte)0x6E,(byte)0x64,(byte)0x20,(byte)0x62,(byte)0x61,(byte)0x69,(byte)0x6C,(byte)0x6F,(byte)0x75,(byte)0x74,(byte)0x20,(byte)0x66,(byte)0x6F,(byte)0x72,(byte)0x20,(byte)0x62,(byte)0x61,(byte)0x6E,(byte)0x6B,(byte)0x73};
	long seqNo=4294967295L;
	byte[] outCounter = new byte[]{0x01};
	long value=5000000000L;
	byte[] txOutScriptLength=new byte[]{(byte)0x43};
	byte[] txOutScript=new byte[]{(byte)0x41,(byte)0x04,(byte)0x67,(byte)0x8A,(byte)0xFD,(byte)0xB0,(byte)0xFE,(byte)0x55,(byte)0x48,(byte)0x27,(byte)0x19,(byte)0x67,(byte)0xF1,(byte)0xA6,(byte)0x71,(byte)0x30,(byte)0xB7,(byte)0x10,(byte)0x5C,(byte)0xD6,(byte)0xA8,(byte)0x28,(byte)0xE0,(byte)0x39,(byte)0x09,(byte)0xA6,(byte)0x79,(byte)0x62,(byte)0xE0,(byte)0xEA,(byte)0x1F,(byte)0x61,(byte)0xDE,(byte)0xB6,(byte)0x49,(byte)0xF6,(byte)0xBC,(byte)0x3F,(byte)0x4C,(byte)0xEF,(byte)0x38,(byte)0xC4,(byte)0xF3,(byte)0x55,(byte)0x04,(byte)0xE5,(byte)0x1E,(byte)0xC1,(byte)0x12,(byte)0xDE,(byte)0x5C,(byte)0x38,(byte)0x4D,(byte)0xF7,(byte)0xBA,(byte)0x0B,(byte)0x8D,(byte)0x57,(byte)0x8A,(byte)0x4C,(byte)0x70,(byte)0x2B,(byte)0x6B,(byte)0xF1,(byte)0x1D,(byte)0x5F,(byte)0xAC};
	int lockTime = 0;
	List<TestBitcoinTransactionInput> genesisInput = new ArrayList<TestBitcoinTransactionInput>(1);
	genesisInput.add(new TestBitcoinTransactionInput(previousTransactionHash,previousTxOutIndex,txInScriptLength,txInScript,seqNo));
	List<TestBitcoinTransactionOutput> genesisOutput = new ArrayList<TestBitcoinTransactionOutput>(1);
	genesisOutput.add(new TestBitcoinTransactionOutput(HiveDecimal.create(BigInteger.valueOf(value)),txOutScriptLength,txOutScript));
	 TestBitcoinTransaction genesisTransaction = new TestBitcoinTransaction(version,inCounter,genesisInput,outCounter,genesisOutput,lockTime);
	 byte[] expectedHash = BitcoinUtil.reverseByteArray(new byte[]{(byte)0x4A,(byte)0x5E,(byte)0x1E,(byte)0x4B,(byte)0xAA,(byte)0xB8,(byte)0x9F,(byte)0x3A,(byte)0x32,(byte)0x51,(byte)0x8A,(byte)0x88,(byte)0xC3,(byte)0x1B,(byte)0xC8,(byte)0x7F,(byte)0x61,(byte)0x8F,(byte)0x76,(byte)0x67,(byte)0x3E,(byte)0x2C,(byte)0xC7,(byte)0x7A,(byte)0xB2,(byte)0x12,(byte)0x7B,(byte)0x7A,(byte)0xFD,(byte)0xED,(byte)0xA3,(byte)0x3B});
	GenericUDF.DeferredObject[] doa = new GenericUDF.DeferredObject[1];
	doa[0]=new GenericUDF.DeferredJavaObject(genesisTransaction);
	BytesWritable bw = (BytesWritable) bthUDF.evaluate(doa);
	
	assertArrayEquals( expectedHash,bw.copyBytes(),"BitcoinTransaction struct transaction hash from UDF");
  }
 
Example 24
Source Project: HiveRunner   Source File: ConvertersTest.java    License: Apache License 2.0
@Test
public void otherTypeInfo() {
  assertEquals(HiveDecimal.create("1.234"), Converters.convert("1.234", decimalTypeInfo));
  assertEquals(new HiveChar("foo", -1), Converters.convert("foo", charTypeInfo));
  assertTrue(new HiveVarchar("foo", -1).equals((HiveVarchar) Converters.convert("foo", varcharTypeInfo)));
  assertEquals("foo", Converters.convert("foo", unknownTypeInfo));
  assertEquals("foo", Converters.convert("foo", voidTypeInfo));
}
 
Example 25
Source Project: HiveRunner   Source File: Converters.java    License: Apache License 2.0
@Override
public Object convert(@SuppressWarnings("rawtypes") Class type, Object value) {
  try {
    return HiveDecimal.create(new BigDecimal(value.toString()));
  } catch (NumberFormatException e) {
    throw new ConversionException(e);
  }
}
 
Example 26
Source Project: hadoopcryptoledger   Source File: BitcoinHiveSerdeTest.java    License: Apache License 2.0
@Test
 public void deserialize() throws  FileNotFoundException, IOException, BitcoinBlockReadException {
BitcoinBlockSerde testSerde = new BitcoinBlockSerde();
// create a BitcoinBlock based on the genesis block test data
ClassLoader classLoader = getClass().getClassLoader();
String fileName="genesis.blk";
String fullFileNameString=classLoader.getResource("testdata/"+fileName).getFile();	
File file = new File(fullFileNameString);
BitcoinBlockReader bbr = null;
boolean direct=false;
try {
	FileInputStream fin = new FileInputStream(file);
	bbr = new BitcoinBlockReader(fin,this.DEFAULT_MAXSIZE_BITCOINBLOCK,this.DEFAULT_BUFFERSIZE,this.DEFAULT_MAGIC,direct);
	BitcoinBlock theBitcoinBlock = bbr.readBlock();
// deserialize it
	Object deserializedObject = testSerde.deserialize(theBitcoinBlock);
	assertTrue( deserializedObject instanceof HiveBitcoinBlock,"Deserialized Object is of type BitcoinBlock");
	HiveBitcoinBlock deserializedBitcoinBlockStruct = (HiveBitcoinBlock)deserializedObject;
// verify certain attributes
	assertEquals( 1, deserializedBitcoinBlockStruct.getTransactions().size(),"Genesis Block must contain exactly one transaction");
	assertEquals( 1, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfInputs().size(),"Genesis Block must contain exactly one transaction with one input");
	assertEquals( 77, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfInputs().get(0).getTxInScript().length,"Genesis Block must contain exactly one transaction with one input and script length 77");
	assertEquals( 0, HiveDecimal.create(5000000000L).compareTo(deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfOutputs().get(0).getValue()), "Value must be BigInteger corresponding to 5000000000L");
	
	
	assertEquals( 1, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfOutputs().size(),"Genesis Block must contain exactly one transaction with one output");
	assertEquals( 67, deserializedBitcoinBlockStruct.getTransactions().get(0).getListOfOutputs().get(0).getTxOutScript().length,"Genesis Block must contain exactly one transaction with one output and script length 67");
} finally {
	if (bbr!=null) 
		bbr.close();
}
 }
 
Example 27
Source Project: osm2orc   Source File: OrcWriter.java    License: ISC License
@Override
public void process(NodeContainer container) {
    DecimalColumnVector lat = (DecimalColumnVector) batch.cols[3];
    DecimalColumnVector lon = (DecimalColumnVector) batch.cols[4];

    checkLimit();
    addCommonProperties(container);

    Node node = container.getEntity();
    lat.set(row, HiveDecimal.create(BigDecimal.valueOf(node.getLatitude())));
    lon.set(row, HiveDecimal.create(BigDecimal.valueOf(node.getLongitude())));
}
 
Example 28
Source Project: osm2orc   Source File: OrcWriter.java    License: ISC License
@Override
public void process(RelationContainer container) {
    DecimalColumnVector lat = (DecimalColumnVector) batch.cols[3];
    DecimalColumnVector lon = (DecimalColumnVector) batch.cols[4];
    ListColumnVector members = (ListColumnVector) batch.cols[6];

    checkLimit();
    addCommonProperties(container);

    lat.isNull[row] = true;
    lon.isNull[row] = true;
    lat.set(row, (HiveDecimal) null);
    lon.set(row, (HiveDecimal) null);

    Relation relation = container.getEntity();

    members.lengths[row] = relation.getMembers().size();
    members.childCount += members.lengths[row];
    members.child.ensureSize(members.childCount, members.offsets[row] != 0);

    for (int j = 0; j < relation.getMembers().size(); j++) {
        StructColumnVector membersStruct = (StructColumnVector) members.child;

        ((BytesColumnVector) membersStruct.fields[0]).setVal((int) members.offsets[row] + j, relation.getMembers().get(j).getMemberType().toString().toLowerCase().getBytes());
        ((LongColumnVector) membersStruct.fields[1]).vector[(int) members.offsets[row] + j] = relation.getMembers().get(j).getMemberId();
        ((BytesColumnVector) membersStruct.fields[2]).setVal((int) members.offsets[row] + j, relation.getMembers().get(j).getMemberRole().getBytes());
    }
}
 
Example 29
Source Project: nifi   Source File: TestNiFiRecordSerDe.java    License: Apache License 2.0
@Test
public void testStructMap() throws SerDeException{
    NiFiRecordSerDe serDe = createSerDe(
            "mapc",
            "map<string,struct<id:int,balance:decimal(18,2)>>"
    );
    RecordSchema recordSchema = new SimpleRecordSchema(Arrays.asList(
            new RecordField("id", RecordFieldType.INT.getDataType()),
            new RecordField("balance", RecordFieldType.DOUBLE.getDataType())
    ));
    RecordSchema schema = new SimpleRecordSchema(Collections.singletonList(
            new RecordField("mapc", RecordFieldType.MAP.getMapDataType(RecordFieldType.RECORD.getRecordDataType(recordSchema)))
    ));

    HashMap<String, Object> input = new HashMap<String, Object>() {{
        put("mapc", new HashMap<String, Object>() {{
            put("current", new MapRecord(recordSchema, new HashMap<String, Object>() {{
                put("id", 1);
                put("balance", 56.9);
            }}));
            put("savings", new MapRecord(recordSchema, new HashMap<String, Object>() {{
                put("id", 2);
                put("balance", 104.65);
            }}));
        }});
    }};

    Object expected = Collections.singletonList(
            new HashMap<String, Object>() {{
                put("current", Arrays.asList(1, HiveDecimal.create(56.9)));
                put("savings", Arrays.asList(2, HiveDecimal.create(104.65)));
            }}
    );

    Object deserialized = serDe.deserialize(new ObjectWritable(new MapRecord(schema, input)));

    assertEquals(expected, deserialized);
}