Java Code Examples for org.apache.hadoop.hbase.KeyValue#getFamilyOffset()
The following examples show how to use org.apache.hadoop.hbase.KeyValue#getFamilyOffset(). These examples are extracted from open source projects.
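Before the project examples, here is a minimal sketch of how getFamilyOffset() is normally used: the returned offset points into the cell's backing array, so it is only meaningful together with getFamilyArray() (or the deprecated getBuffer()) and getFamilyLength(). The snippet below is illustrative only and is not taken from any of the projects; the row, family, and qualifier literals are made up, and it assumes org.apache.hadoop.hbase.KeyValue, org.apache.hadoop.hbase.CellUtil, and org.apache.hadoop.hbase.util.Bytes are available.

// Illustrative sketch (not from the projects below): reading the column family
// straight out of a KeyValue's backing array without copying the whole cell.
KeyValue kv = new KeyValue(Bytes.toBytes("row1"),   // row key (made-up)
                           Bytes.toBytes("cf"),     // column family (made-up)
                           Bytes.toBytes("q"),      // qualifier (made-up)
                           Bytes.toBytes("value")); // value (made-up)

// getFamilyOffset() points into getFamilyArray(); both are needed to interpret the bytes.
String family = Bytes.toString(kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength());

// Equivalent convenience call that copies the family into a fresh array.
byte[] familyCopy = CellUtil.cloneFamily(kv);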
Example 1
Source Project: phoenix File: IndexManagementUtil.java License: BSD 3-Clause "New" or "Revised" License
public static ValueGetter createGetterFromKeyValues(Collection<KeyValue> pendingUpdates) {
    final Map<ReferencingColumn, ImmutableBytesPtr> valueMap =
            Maps.newHashMapWithExpectedSize(pendingUpdates.size());
    for (KeyValue kv : pendingUpdates) {
        // create new pointers to each part of the kv
        ImmutableBytesPtr family =
                new ImmutableBytesPtr(kv.getBuffer(), kv.getFamilyOffset(), kv.getFamilyLength());
        ImmutableBytesPtr qual =
                new ImmutableBytesPtr(kv.getBuffer(), kv.getQualifierOffset(), kv.getQualifierLength());
        ImmutableBytesPtr value =
                new ImmutableBytesPtr(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
        valueMap.put(new ReferencingColumn(family, qual), value);
    }
    return new ValueGetter() {
        @Override
        public ImmutableBytesPtr getLatestValue(ColumnReference ref) throws IOException {
            return valueMap.get(ReferencingColumn.wrap(ref));
        }
    };
}
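A possible way to use the getter built above. This usage is a sketch, not part of the Phoenix example: the ColumnReference(byte[] family, byte[] qualifier) constructor and the literal column names are assumptions made for illustration.

// Hypothetical usage; "cf" and "col" are made-up names.
ValueGetter getter = IndexManagementUtil.createGetterFromKeyValues(pendingUpdates);
ImmutableBytesPtr latest =
        getter.getLatestValue(new ColumnReference(Bytes.toBytes("cf"), Bytes.toBytes("col")));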
Example 2
Source Project: phoenix File: SchemaUtil.java License: BSD 3-Clause "New" or "Revised" License
private static KeyValue upgradeTo3(KeyValue keyValue) {
    byte[] buf = keyValue.getBuffer();
    int newLength = keyValue.getRowLength() + 1;
    byte[] newKey = new byte[newLength];
    newKey[0] = QueryConstants.SEPARATOR_BYTE;
    System.arraycopy(buf, keyValue.getRowOffset(), newKey, 1, keyValue.getRowLength());
    byte[] valueBuf = updateValueIfNecessary(keyValue);
    int valueOffset = keyValue.getValueOffset();
    int valueLength = keyValue.getValueLength();
    if (valueBuf != buf) {
        valueOffset = 0;
        valueLength = valueBuf.length;
    }
    return new KeyValue(newKey, 0, newLength,
            buf, keyValue.getFamilyOffset(), keyValue.getFamilyLength(),
            buf, keyValue.getQualifierOffset(), keyValue.getQualifierLength(),
            keyValue.getTimestamp(), Type.codeToType(keyValue.getType()),
            valueBuf, valueOffset, valueLength);
}
Example 3
Source Project: Halyard File: HBaseSailHashConflictTest.java License: Apache License 2.0
@BeforeClass
public static void setup() throws Exception {
    try (HTable table = HalyardTableUtils.getTable(HBaseServerTestInstance.getInstanceConfig(),
            "testConflictingHash", true, 0)) {
        long timestamp = System.currentTimeMillis();
        KeyValue triple[] = HalyardTableUtils.toKeyValues(SUBJ, PRED, OBJ, null, false, timestamp);
        KeyValue conflicts[][] = new KeyValue[][] {
            HalyardTableUtils.toKeyValues(SUBJ, PRED, CONF, null, false, timestamp),
            HalyardTableUtils.toKeyValues(SUBJ, CONF, OBJ, null, false, timestamp),
            HalyardTableUtils.toKeyValues(SUBJ, CONF, CONF, null, false, timestamp),
            HalyardTableUtils.toKeyValues(CONF, PRED, OBJ, null, false, timestamp),
            HalyardTableUtils.toKeyValues(CONF, PRED, CONF, null, false, timestamp),
            HalyardTableUtils.toKeyValues(CONF, CONF, OBJ, null, false, timestamp),
            HalyardTableUtils.toKeyValues(CONF, CONF, CONF, null, false, timestamp),
        };
        for (int i = 0; i < triple.length; i++) {
            KeyValue kv = triple[i];
            table.put(new Put(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), kv.getTimestamp()).add(kv));
            for (int j = 0; j < conflicts.length; j++) {
                KeyValue xkv = new KeyValue(
                        kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(),
                        kv.getFamilyArray(), kv.getFamilyOffset(), kv.getFamilyLength(),
                        conflicts[j][i].getQualifierArray(), conflicts[j][i].getQualifierOffset(), conflicts[j][i].getQualifierLength(),
                        kv.getTimestamp(), KeyValue.Type.Put,
                        conflicts[j][i].getValueArray(), conflicts[j][i].getValueOffset(), conflicts[j][i].getValueLength());
                table.put(new Put(xkv.getRowArray(), xkv.getRowOffset(), xkv.getRowLength(), xkv.getTimestamp()).add(xkv));
            }
        }
        table.flushCommits();
    }
    sail = new HBaseSail(HBaseServerTestInstance.getInstanceConfig(), "testConflictingHash", false, 0, true, 0, null, null);
    sail.initialize();
}
Example 4
Source Project: phoenix File: UpgradeUtil.java License: Apache License 2.0
@SuppressWarnings("deprecation") private static KeyValue addSaltByte(KeyValue keyValue, int nSaltBuckets) { byte[] buf = keyValue.getBuffer(); int length = keyValue.getRowLength(); int offset = keyValue.getRowOffset(); boolean isViewSeq = length > SEQ_PREFIX_BYTES.length && Bytes.compareTo(SEQ_PREFIX_BYTES, 0, SEQ_PREFIX_BYTES.length, buf, offset, SEQ_PREFIX_BYTES.length) == 0; if (!isViewSeq && nSaltBuckets == 0) { return null; } byte[] newBuf; if (isViewSeq) { // We messed up the name for the sequences for view indexes so we'll take this opportunity to fix it if (buf[length-1] == 0) { // Global indexes on views have trailing null byte length--; } byte[][] rowKeyMetaData = new byte[3][]; SchemaUtil.getVarChars(buf, offset, length, 0, rowKeyMetaData); byte[] schemaName = rowKeyMetaData[PhoenixDatabaseMetaData.SCHEMA_NAME_INDEX]; byte[] unprefixedSchemaName = new byte[schemaName.length - MetaDataUtil.VIEW_INDEX_SEQUENCE_PREFIX_BYTES.length]; System.arraycopy(schemaName, MetaDataUtil.VIEW_INDEX_SEQUENCE_PREFIX_BYTES.length, unprefixedSchemaName, 0, unprefixedSchemaName.length); byte[] tableName = rowKeyMetaData[PhoenixDatabaseMetaData.TABLE_NAME_INDEX]; PName physicalName = PNameFactory.newName(unprefixedSchemaName); // Reformulate key based on correct data newBuf = MetaDataUtil.getViewIndexSequenceKey(tableName == null ? null : Bytes.toString(tableName), physicalName, nSaltBuckets).getKey(); } else { newBuf = new byte[length + 1]; System.arraycopy(buf, offset, newBuf, SaltingUtil.NUM_SALTING_BYTES, length); newBuf[0] = SaltingUtil.getSaltingByte(newBuf, SaltingUtil.NUM_SALTING_BYTES, length, nSaltBuckets); } return new KeyValue(newBuf, 0, newBuf.length, buf, keyValue.getFamilyOffset(), keyValue.getFamilyLength(), buf, keyValue.getQualifierOffset(), keyValue.getQualifierLength(), keyValue.getTimestamp(), KeyValue.Type.codeToType(keyValue.getType()), buf, keyValue.getValueOffset(), keyValue.getValueLength()); }
Example 5
Source Project: phoenix File: StatisticsCollector.java License: Apache License 2.0
/**
 * Update the current statistics based on the latest batch of key-values from the underlying scanner
 *
 * @param results next batch of {@link KeyValue}s
 */
public void collectStatistics(final List<Cell> results) {
    Map<ImmutableBytesPtr, Boolean> famMap = Maps.newHashMap();
    List<GuidePostsInfo> rowTracker = null;
    if (cachedGps == null) {
        rowTracker = new ArrayList<GuidePostsInfo>();
    }
    for (Cell cell : results) {
        KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
        maxTimeStamp = Math.max(maxTimeStamp, kv.getTimestamp());
        Pair<Long, GuidePostsInfo> gps;
        if (cachedGps == null) {
            ImmutableBytesPtr cfKey = new ImmutableBytesPtr(kv.getFamilyArray(), kv.getFamilyOffset(),
                    kv.getFamilyLength());
            gps = guidePostsMap.get(cfKey);
            if (gps == null) {
                gps = new Pair<Long, GuidePostsInfo>(0l,
                        new GuidePostsInfo(0, Collections.<byte[]> emptyList(), 0l));
                guidePostsMap.put(cfKey, gps);
            }
            if (famMap.get(cfKey) == null) {
                famMap.put(cfKey, true);
                rowTracker.add(gps.getSecond());
            }
        } else {
            gps = cachedGps;
        }
        int kvLength = kv.getLength();
        long byteCount = gps.getFirst() + kvLength;
        gps.setFirst(byteCount);
        if (byteCount >= guidepostDepth) {
            byte[] row = ByteUtil.copyKeyBytesIfNecessary(
                    new ImmutableBytesWritable(kv.getRowArray(), kv.getRowOffset(), kv.getRowLength()));
            if (gps.getSecond().addGuidePost(row, byteCount)) {
                gps.setFirst(0l);
            }
        }
    }
    if (cachedGps == null) {
        for (GuidePostsInfo s : rowTracker) {
            s.incrementRowCount();
        }
    } else {
        cachedGps.getSecond().incrementRowCount();
    }
}
Example 6
Source Project: hbase File: TestBufferedDataBlockEncoder.java License: Apache License 2.0
@Test
public void testKVCodecWithTagsForDecodedCellsWithNoTags() throws Exception {
    KeyValue kv1 = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("1"),
            HConstants.LATEST_TIMESTAMP, Bytes.toBytes("1"));
    // kv1.getKey() return a copy of the Key bytes which starts from RK_length. Means from offsets,
    // we need to reduce the KL and VL parts.
    OnheapDecodedCell c1 = new OnheapDecodedCell(kv1.getKey(), kv1.getRowLength(),
            kv1.getFamilyOffset() - KeyValue.ROW_OFFSET, kv1.getFamilyLength(),
            kv1.getQualifierOffset() - KeyValue.ROW_OFFSET, kv1.getQualifierLength(),
            kv1.getTimestamp(), kv1.getTypeByte(), kv1.getValueArray(), kv1.getValueOffset(),
            kv1.getValueLength(), kv1.getSequenceId(), kv1.getTagsArray(), kv1.getTagsOffset(),
            kv1.getTagsLength());
    KeyValue kv2 = new KeyValue(Bytes.toBytes("r2"), Bytes.toBytes("f"), Bytes.toBytes("2"),
            HConstants.LATEST_TIMESTAMP, Bytes.toBytes("2"));
    OnheapDecodedCell c2 = new OnheapDecodedCell(kv2.getKey(), kv2.getRowLength(),
            kv2.getFamilyOffset() - KeyValue.ROW_OFFSET, kv2.getFamilyLength(),
            kv2.getQualifierOffset() - KeyValue.ROW_OFFSET, kv2.getQualifierLength(),
            kv2.getTimestamp(), kv2.getTypeByte(), kv2.getValueArray(), kv2.getValueOffset(),
            kv2.getValueLength(), kv2.getSequenceId(), kv2.getTagsArray(), kv2.getTagsOffset(),
            kv2.getTagsLength());
    KeyValue kv3 = new KeyValue(Bytes.toBytes("r3"), Bytes.toBytes("cf"), Bytes.toBytes("qual"),
            HConstants.LATEST_TIMESTAMP, Bytes.toBytes("3"));
    BufferedDataBlockEncoder.OffheapDecodedExtendedCell c3 =
            new BufferedDataBlockEncoder.OffheapDecodedExtendedCell(ByteBuffer.wrap(kv2.getKey()),
                    kv2.getRowLength(), kv2.getFamilyOffset() - KeyValue.ROW_OFFSET, kv2.getFamilyLength(),
                    kv2.getQualifierOffset() - KeyValue.ROW_OFFSET, kv2.getQualifierLength(),
                    kv2.getTimestamp(), kv2.getTypeByte(), ByteBuffer.wrap(kv2.getValueArray()),
                    kv2.getValueOffset(), kv2.getValueLength(), kv2.getSequenceId(),
                    ByteBuffer.wrap(kv2.getTagsArray()), kv2.getTagsOffset(), kv2.getTagsLength());
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    KeyValueCodecWithTags codec = new KeyValueCodecWithTags();
    Encoder encoder = codec.getEncoder(os);
    encoder.write(c1);
    encoder.write(c2);
    encoder.write(c3);
    ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
    Decoder decoder = codec.getDecoder(is);
    assertTrue(decoder.advance());
    assertTrue(CellUtil.equals(c1, decoder.current()));
    assertTrue(decoder.advance());
    assertTrue(CellUtil.equals(c2, decoder.current()));
    assertTrue(decoder.advance());
    assertTrue(CellUtil.equals(c3, decoder.current()));
    assertFalse(decoder.advance());
}
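The offset arithmetic in Example 6 deserves a note. getFamilyOffset() and getQualifierOffset() are relative to the KeyValue's full backing buffer, which starts with a 4-byte key length and a 4-byte value length, while kv.getKey() copies only the key portion; KeyValue.ROW_OFFSET is that 8-byte prefix, hence the subtraction. A minimal sketch of the relationship (the literals are illustrative, not from the test):

// Illustrative only: where the family bytes land inside the copied key.
KeyValue kv = new KeyValue(Bytes.toBytes("r"), Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes("v"));
byte[] key = kv.getKey(); // copy of the key portion only, without the 4+4 byte length prefix
int familyOffsetInKey = kv.getFamilyOffset() - KeyValue.ROW_OFFSET;
// key[familyOffsetInKey] is the first byte of the family, i.e. 'f'.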