Java Code Examples for org.apache.hadoop.hbase.util.Bytes#toDouble()
The following examples show how to use
org.apache.hadoop.hbase.util.Bytes#toDouble() .
You can vote up the examples you like or vote down the ones you don't like, and follow the links above each example to visit the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: DefaultHBaseSerde.java From envelope with Apache License 2.0 | 6 votes |
/**
 * Decodes a raw HBase cell value into the Java object for the configured type name.
 *
 * @param source raw bytes backing the value
 * @param offset index of the first byte of the value within {@code source}
 * @param length number of bytes making up the value
 * @param type   one of the {@code ConfigurationDataTypes} type-name constants
 * @return the decoded value
 * @throws IllegalArgumentException if {@code type} is not a supported type name
 */
private static Object getColumnValue(byte[] source, int offset, int length, String type) {
  switch (type) {
    case ConfigurationDataTypes.INT:
      return Bytes.toInt(source, offset, length);
    case ConfigurationDataTypes.LONG:
      return Bytes.toLong(source, offset, length);
    case ConfigurationDataTypes.BOOLEAN:
      // Bytes has no offset-aware toBoolean; assumes the boolean spans the whole array.
      return Bytes.toBoolean(source);
    case ConfigurationDataTypes.FLOAT:
      // Fixed: honor the requested offset instead of always decoding from index 0.
      return Bytes.toFloat(source, offset);
    case ConfigurationDataTypes.DOUBLE:
      // Fixed: honor the requested offset instead of always decoding from index 0.
      return Bytes.toDouble(source, offset);
    case ConfigurationDataTypes.STRING:
      return Bytes.toString(source, offset, length);
    default:
      LOG.error("Unsupported column type: {}", type);
      throw new IllegalArgumentException("Unsupported column type: " + type);
  }
}
Example 2
Source File: HBaseFieldInfo.java From attic-apex-malhar with Apache License 2.0 | 6 votes |
/**
 * Decodes the raw HBase bytes into the Java value that matches this
 * field's declared {@code SupportType}.
 *
 * @param bytes raw cell bytes
 * @return the decoded value
 * @throws IllegalArgumentException for a type this mapping does not support
 */
public Object toValue( byte[] bytes )
{
  final SupportType fieldType = getType();
  switch (fieldType) {
    case BOOLEAN:
      return Bytes.toBoolean( bytes );
    case SHORT:
      return Bytes.toShort( bytes );
    case INTEGER:
      return Bytes.toInt( bytes );
    case LONG:
      return Bytes.toLong( bytes );
    case FLOAT:
      return Bytes.toFloat( bytes );
    case DOUBLE:
      return Bytes.toDouble( bytes );
    case STRING:
      return Bytes.toString( bytes );
    default:
      throw new IllegalArgumentException("Unsupported type: " + fieldType);
  }
}
Example 3
Source File: HBaseRecordCursor.java From presto-hbase-connector with Apache License 2.0 | 5 votes |
/** * We store the column value in HBase like Bytes.toBytes(value) rather than Bytes.toBytes(value.toString) * * @param type type * @param value value * @return object */ Object matchValue(Type type, byte[] value) { if (type == null) { return Bytes.toString(value); } Class typeClass = type.getClass(); // return null if value is null // || (For varchar type if length is 0, return value is Bytes.toString(value), this will return "") // beside varchar type if length is 0, the value will be null if (value == null) { return null; } if (value.length == 0 && !typeClass.equals(VARCHAR_CLASS)) { return null; } if (typeClass.equals(VARCHAR_CLASS)) { return Bytes.toString(value); } else if (typeClass.equals(INTEGER_CLASS)) { return Bytes.toInt(value); } else if (typeClass.equals(BIGINT_CLASS)) { return Bytes.toLong(value); } else if (typeClass.equals(DOUBLE_CLASS)) { return Bytes.toDouble(value); } else if (typeClass.equals(TIMESTAMP_CLASS)) { return Bytes.toLong(value); } else if (typeClass.equals(BOOLEAN_CLASS)) { // 0: false, 1: true return Bytes.toInt(value); } else if (type.getClass().getSuperclass().equals(DecimalType.class)) { return Bytes.toBigDecimal(value); } else { return Bytes.toString(value); } }
Example 4
Source File: TestHBaseStorage.java From spork with Apache License 2.0 | 5 votes |
/**
 * load from hbase 'TESTTABLE_1' using HBaseBinary format, and store it into
 * 'TESTTABLE_2' using HBaseBinaryFormat projecting out column c
 *
 * @throws IOException
 */
@Test
public void testStoreToHBase_1_with_projection() throws IOException {
    // This test predates the new logical plan; force Pig back onto the old one.
    System.getProperties().setProperty("pig.usenewlogicalplan", "false");
    // Source table is created and pre-populated; target table starts empty.
    prepareTable(TESTTABLE_1, true, DataFormat.HBaseBinary);
    prepareTable(TESTTABLE_2, false, DataFormat.HBaseBinary);
    scanTable1(pig, DataFormat.HBaseBinary);
    // Project only rowKey, col_a and col_b (drops column c) before storing.
    pig.registerQuery("b = FOREACH a GENERATE rowKey, col_a, col_b;");
    pig.store("b", TESTTABLE_2,
            "org.apache.pig.backend.hadoop.hbase.HBaseStorage('"
                    + TESTCOLUMN_A + " " + TESTCOLUMN_B
                    + "','-caster HBaseBinaryConverter')");
    // Read TESTTABLE_2 back directly through the HBase client and verify contents.
    HTable table = new HTable(conf, TESTTABLE_2);
    ResultScanner scanner = table.getScanner(new Scan());
    Iterator<Result> iter = scanner.iterator();
    int i = 0;
    for (i = 0; iter.hasNext(); ++i) {
        Result result = iter.next();
        String v = String.valueOf(i);
        String rowKey = Bytes.toString(result.getRow());
        int col_a = Bytes.toInt(getColValue(result, TESTCOLUMN_A));
        double col_b = Bytes.toDouble(getColValue(result, TESTCOLUMN_B));
        // Row keys are zero-padded to two digits ("00".."99").
        Assert.assertEquals("00".substring(v.length()) + v, rowKey);
        Assert.assertEquals(i, col_a);
        Assert.assertEquals(i + 0.0, col_b, 1e-6);
    }
    // Exactly 100 rows are expected to have been copied.
    Assert.assertEquals(100, i);
    table.close();
}
Example 5
Source File: TestHBaseStorage.java From spork with Apache License 2.0 | 5 votes |
/**
 * load from hbase 'TESTTABLE_1' using HBaseBinary format, and store it into
 * 'TESTTABLE_2' using HBaseBinaryFormat
 *
 * @throws IOException
 */
@Test
public void testStoreToHBase_1() throws IOException {
    // Source table is created and pre-populated; target table starts empty.
    prepareTable(TESTTABLE_1, true, DataFormat.HBaseBinary);
    prepareTable(TESTTABLE_2, false, DataFormat.HBaseBinary);
    // Have the output committer write _SUCCESS markers for this run.
    pig.getPigContext().getProperties()
            .setProperty(MRConfiguration.FILEOUTPUTCOMMITTER_MARKSUCCESSFULJOBS, "true");
    scanTable1(pig, DataFormat.HBaseBinary);
    // Store all three columns using the binary caster.
    pig.store("a", "hbase://" + TESTTABLE_2,
            "org.apache.pig.backend.hadoop.hbase.HBaseStorage('"
                    + TESTCOLUMN_A + " " + TESTCOLUMN_B + " " + TESTCOLUMN_C
                    + "','-caster HBaseBinaryConverter')");
    // Read TESTTABLE_2 back through the HBase client and verify every row.
    HTable table = new HTable(conf, TESTTABLE_2);
    ResultScanner scanner = table.getScanner(new Scan());
    Iterator<Result> iter = scanner.iterator();
    int i = 0;
    for (i = 0; iter.hasNext(); ++i) {
        Result result = iter.next();
        String v = i + "";
        String rowKey = Bytes.toString(result.getRow());
        int col_a = Bytes.toInt(getColValue(result, TESTCOLUMN_A));
        double col_b = Bytes.toDouble(getColValue(result, TESTCOLUMN_B));
        String col_c = Bytes.toString(getColValue(result, TESTCOLUMN_C));
        // Row keys are zero-padded to two digits.
        Assert.assertEquals("00".substring(v.length()) + v, rowKey);
        Assert.assertEquals(i, col_a);
        Assert.assertEquals(i + 0.0, col_b, 1e-6);
        Assert.assertEquals("Text_" + i, col_c);
    }
    Assert.assertEquals(100, i);
    // Restore the marker setting so later tests are unaffected.
    pig.getPigContext().getProperties()
            .setProperty(MRConfiguration.FILEOUTPUTCOMMITTER_MARKSUCCESSFULJOBS, "false");
    table.close();
}
Example 6
Source File: HBaseBinaryConverter.java From spork with Apache License 2.0 | 5 votes |
/**
 * Decodes a byte array into a {@code Double}, tolerating inputs that are not
 * exactly eight bytes wide.
 *
 * @param b raw bytes
 * @return the decoded double value
 * @throws IOException declared for interface compatibility
 */
@Override
public Double bytesToDouble(byte[] b) throws IOException {
    // Normalize to exactly SIZEOF_DOUBLE bytes: zero-pad short input at the
    // front, or keep only the leading eight bytes of longer input.
    byte[] normalized = (b.length < Bytes.SIZEOF_DOUBLE)
            ? Bytes.padHead(b, Bytes.SIZEOF_DOUBLE - b.length)
            : Bytes.head(b, Bytes.SIZEOF_DOUBLE);
    return Bytes.toDouble(normalized);
}
Example 7
Source File: PhTypeUtil.java From canal with Apache License 2.0 | 5 votes |
/**
 * Decodes an unsigned double from {@code b} starting at offset {@code o}.
 *
 * @param b encoded bytes
 * @param o offset of the 8-byte value within {@code b}
 * @return the decoded non-negative double
 * @throws RuntimeException if the decoded value is negative (corrupt encoding)
 */
private static double decodeUnsignedDouble(byte[] b, int o) {
    checkForSufficientLength(b, o, Bytes.SIZEOF_DOUBLE);
    double v = Bytes.toDouble(b, o);
    if (v < 0) {
        // Fixed: include the offending value instead of a bare, message-less exception.
        throw new RuntimeException("Value " + v + " cannot be decoded as an unsigned double");
    }
    return v;
}
Example 8
Source File: ByteUtil.java From hraven with Apache License 2.0 | 5 votes |
/**
 * return a value from the NavigableMap as a Double
 * @param key to be looked up for the value
 * @param infoValues - the map containing the key values
 * @return value as Double or 0.0
 */
public static double getValueAsDouble(byte[] key,
        NavigableMap<byte[], byte[]> infoValues) {
    byte[] raw = infoValues.get(key);
    // A missing key defaults to 0.0 rather than failing.
    return (raw == null) ? 0.0 : Bytes.toDouble(raw);
}
Example 9
Source File: PUnsignedDouble.java From phoenix with Apache License 2.0 | 5 votes |
/**
 * Decodes an unsigned double from {@code b} at offset {@code o}, honoring the
 * column's sort order.
 *
 * @param b         encoded bytes
 * @param o         offset of the 8-byte value within {@code b}
 * @param sortOrder ASC for the natural encoding; DESC when the stored bytes were bit-inverted
 * @return the decoded non-negative double
 */
@Override
public double decodeDouble(byte[] b, int o, SortOrder sortOrder) {
    Preconditions.checkNotNull(sortOrder);
    checkForSufficientLength(b, o, Bytes.SIZEOF_DOUBLE);
    if (sortOrder == SortOrder.DESC) {
        b = SortOrder.invert(b, o, new byte[Bytes.SIZEOF_DOUBLE], 0, Bytes.SIZEOF_DOUBLE);
        // Fixed: the inverted copy holds only these eight bytes at index 0, so the
        // offset must be reset — decoding at the original offset reads wrong bytes
        // or past the end of the 8-byte array.
        o = 0;
    }
    double v = Bytes.toDouble(b, o);
    if (v < 0) {
        throw newIllegalDataException();
    }
    return v;
}
Example 10
Source File: AppSummaryService.java From hraven with Apache License 2.0 | 5 votes |
/**
 * Reads the currently aggregated job cost for the given app key and writes back
 * the sum of that value and this job's cost via an atomic check-and-put.
 *
 * @param appAggKey  aggregation row key source
 * @param aggTable   aggregation table to read/update
 * @param jobDetails job whose cost is folded into the aggregate
 * @return whether the check-and-put succeeded
 * @throws IOException on HBase access failure
 */
private boolean updateCost(AppAggregationKey appAggKey, Table aggTable,
        JobDetails jobDetails) throws IOException {
    byte[] rowKey = aggConv.toBytes(appAggKey);
    // Fetch only the cost column for this row.
    Get g = new Get(rowKey);
    g.addColumn(AggregationConstants.INFO_FAM_BYTES, AggregationConstants.JOBCOST_BYTES);
    Result r = aggTable.get(g);
    // Absent cell => no prior cost; expected bytes stay null for the check-and-put.
    double existingCost = 0.0;
    byte[] existingCostBytes = null;
    Cell columnLatest = r.getColumnLatestCell(AggregationConstants.INFO_FAM_BYTES,
            AggregationConstants.JOBCOST_BYTES);
    if (columnLatest != null) {
        existingCost = Bytes.toDouble(CellUtil.cloneValue(columnLatest));
        // NOTE(review): the expected bytes are re-encoded from the decoded double
        // instead of reusing the cell bytes directly; assumes the stored encoding is
        // always the canonical Bytes.toBytes(double) form — confirm, otherwise the
        // checkAndPut comparison could never match.
        existingCostBytes = Bytes.toBytes(existingCost);
    }
    double newCost = existingCost + jobDetails.getCost();
    if (LOG.isTraceEnabled()) {
        LOG.trace(" total app aggregated cost " + newCost);
    }
    // now insert cost
    return executeCheckAndPut(aggTable, rowKey, existingCostBytes, Bytes.toBytes(newCost),
            AggregationConstants.INFO_FAM_BYTES, AggregationConstants.JOBCOST_BYTES);
}
Example 11
Source File: HBaseDoubleComparator.java From pxf with Apache License 2.0 | 5 votes |
/**
 * Rebuilds an {@code HBaseDoubleComparator} from its protobuf-serialized form.
 *
 * @param pbBytes serialized {@code ByteArrayComparable} protobuf message
 * @return a comparator holding the decoded double value
 * @throws DeserializationException if the bytes are not a valid protobuf message
 */
public static ByteArrayComparable parseFrom(final byte[] pbBytes) throws DeserializationException {
    final ComparatorProtos.ByteArrayComparable proto;
    try {
        proto = ComparatorProtos.ByteArrayComparable.parseFrom(pbBytes);
    } catch (InvalidProtocolBufferException e) {
        throw new DeserializationException(e);
    }
    double value = Bytes.toDouble(proto.getValue().toByteArray());
    return new HBaseDoubleComparator(value);
}
Example 12
Source File: MultiVersionTask.java From DataLink with Apache License 2.0 | 5 votes |
/**
 * Converts a raw HBase cell value into the DataX {@code Column} wrapper matching
 * the configured column type. A null byte array is preserved as a null-valued
 * column (except for BYTES, which wraps null directly).
 *
 * @param columnType target logical type for this column
 * @param byteArray  raw cell bytes, possibly null
 * @return the wrapped value
 * @throws UnsupportedEncodingException if the configured string encoding is invalid
 */
private Column convertBytesToAssignType(ColumnType columnType, byte[] byteArray) throws UnsupportedEncodingException {
    Column column;
    switch (columnType) {
        case BOOLEAN:
            column = new BoolColumn(byteArray == null ? null : Bytes.toBoolean(byteArray));
            break;
        case SHORT:
            // presumably routed through String.valueOf to hit LongColumn's String
            // constructor — verify against the Column API.
            column = new LongColumn(byteArray == null ? null : String.valueOf(Bytes.toShort(byteArray)));
            break;
        case INT:
            column = new LongColumn(byteArray == null ? null : Bytes.toInt(byteArray));
            break;
        case LONG:
            column = new LongColumn(byteArray == null ? null : Bytes.toLong(byteArray));
            break;
        case BYTES:
            column = new BytesColumn(byteArray);
            break;
        case FLOAT:
            column = new DoubleColumn(byteArray == null ? null : Bytes.toFloat(byteArray));
            break;
        case DOUBLE:
            column = new DoubleColumn(byteArray == null ? null : Bytes.toDouble(byteArray));
            break;
        case STRING:
            // Decoded with the user-configured charset from the enclosing task.
            column = new StringColumn(byteArray == null ? null : new String(byteArray, super.encoding));
            break;
        case BINARY_STRING:
            // Hex-escaped representation of arbitrary binary data.
            column = new StringColumn(byteArray == null ? null : Bytes.toStringBinary(byteArray));
            break;
        default:
            throw DataXException.asDataXException(HbaseReaderErrorCode.ILLEGAL_VALUE, "Hbasereader 不支持您配置的列类型:" + columnType);
    }
    return column;
}
Example 13
Source File: PhTypeUtil.java From canal-1.1.3 with Apache License 2.0 | 5 votes |
/**
 * Decodes an unsigned double from {@code b} starting at offset {@code o}.
 *
 * @param b encoded bytes
 * @param o offset of the 8-byte value within {@code b}
 * @return the decoded non-negative double
 * @throws RuntimeException if the decoded value is negative (corrupt encoding)
 */
private static double decodeUnsignedDouble(byte[] b, int o) {
    checkForSufficientLength(b, o, Bytes.SIZEOF_DOUBLE);
    double v = Bytes.toDouble(b, o);
    if (v < 0) {
        // Fixed: include the offending value instead of a bare, message-less exception.
        throw new RuntimeException("Value " + v + " cannot be decoded as an unsigned double");
    }
    return v;
}
Example 14
Source File: HBaseTypeUtils.java From flink with Apache License 2.0 | 5 votes |
/** * Deserialize byte array to Java Object with the given type. */ public static Object deserializeToObject(byte[] value, int typeIdx, Charset stringCharset) { switch (typeIdx) { case 0: // byte[] return value; case 1: // String return new String(value, stringCharset); case 2: // byte return value[0]; case 3: return Bytes.toShort(value); case 4: return Bytes.toInt(value); case 5: return Bytes.toLong(value); case 6: return Bytes.toFloat(value); case 7: return Bytes.toDouble(value); case 8: return Bytes.toBoolean(value); case 9: // sql.Timestamp encoded as long return new Timestamp(Bytes.toLong(value)); case 10: // sql.Date encoded as long return new Date(Bytes.toLong(value)); case 11: // sql.Time encoded as long return new Time(Bytes.toLong(value)); case 12: return Bytes.toBigDecimal(value); case 13: return new BigInteger(value); default: throw new IllegalArgumentException("unsupported type index:" + typeIdx); } }
Example 15
Source File: DefaultColumnCoder.java From tddl5 with Apache License 2.0 | 4 votes |
/**
 * Decodes raw HBase bytes into a boxed {@code Double}.
 *
 * @param v raw bytes produced by {@code Bytes.toBytes(double)}
 * @return the decoded value, boxed
 */
protected Object decodeDoubleFromBytes(byte[] v) {
    return Double.valueOf(Bytes.toDouble(v));
}
Example 16
Source File: DeserializedBooleanComparator.java From pentaho-hadoop-shims with Apache License 2.0 | 4 votes |
public static Boolean decodeBoolFromNumber( byte[] rawEncoded ) { if ( rawEncoded.length == Bytes.SIZEOF_BYTE ) { byte val = rawEncoded[ 0 ]; if ( val == 0 || val == 1 ) { return new Boolean( val == 1 ); } } if ( rawEncoded.length == Bytes.SIZEOF_SHORT ) { short tempShort = Bytes.toShort( rawEncoded ); if ( tempShort == 0 || tempShort == 1 ) { return new Boolean( tempShort == 1 ); } } if ( rawEncoded.length == Bytes.SIZEOF_INT || rawEncoded.length == Bytes.SIZEOF_FLOAT ) { int tempInt = Bytes.toInt( rawEncoded ); if ( tempInt == 1 || tempInt == 0 ) { return new Boolean( tempInt == 1 ); } float tempFloat = Bytes.toFloat( rawEncoded ); if ( tempFloat == 0.0f || tempFloat == 1.0f ) { return new Boolean( tempFloat == 1.0f ); } } if ( rawEncoded.length == Bytes.SIZEOF_LONG || rawEncoded.length == Bytes.SIZEOF_DOUBLE ) { long tempLong = Bytes.toLong( rawEncoded ); if ( tempLong == 0L || tempLong == 1L ) { return new Boolean( tempLong == 1L ); } double tempDouble = Bytes.toDouble( rawEncoded ); if ( tempDouble == 0.0 || tempDouble == 1.0 ) { return new Boolean( tempDouble == 1.0 ); } } // not identifiable from a number return null; }
Example 17
Source File: SamplingFilter.java From spliceengine with GNU Affero General Public License v3.0 | 4 votes |
/**
 * Rebuilds a {@code SamplingFilter} from its serialized form: the raw 8-byte
 * big-endian encoding of the sampling rate.
 *
 * @param pbBytes serialized filter bytes
 * @return a filter with the decoded rate
 * @throws DeserializationException declared for interface compatibility
 */
public static SamplingFilter parseFrom(final byte [] pbBytes) throws DeserializationException {
    return new SamplingFilter(Bytes.toDouble(pbBytes));
}
Example 18
Source File: RawDouble.java From hbase with Apache License 2.0 | 4 votes |
/**
 * Read a {@code double} value from the buffer {@code buff}.
 *
 * @param buff   source buffer
 * @param offset position of the first of the eight encoded bytes
 * @return the decoded double
 */
public double decodeDouble(byte[] buff, int offset) {
    return Bytes.toDouble(buff, offset);
}
Example 19
Source File: CommonHBaseBytesUtil.java From pentaho-hadoop-shims with Apache License 2.0 | 4 votes |
/**
 * Decodes an 8-byte big-endian HBase encoding into a primitive double.
 *
 * @param value raw bytes produced by {@code Bytes.toBytes(double)}
 * @return the decoded double
 */
public double toDouble( byte[] value ) {
  return Bytes.toDouble( value );
}
Example 20
Source File: ByteArrayValueMappers.java From hbase-indexer with Apache License 2.0 | 4 votes |
/**
 * Maps raw HBase bytes to their {@code Double} value.
 *
 * @param input raw cell bytes
 * @return the decoded double, boxed
 */
@Override
protected Object mapInternal(byte[] input) {
    return Double.valueOf(Bytes.toDouble(input));
}