Java Code Examples for org.apache.hadoop.hbase.CellUtil#matchingRowColumn()

The following examples show how to use org.apache.hadoop.hbase.CellUtil#matchingRowColumn(). Each example is taken from the Apache HBase project; the source file and license are noted above each snippet.
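Before diving into the examples, here is a minimal, self-contained sketch of what CellUtil.matchingRowColumn(Cell, Cell) checks: it returns true when two cells share the same row, column family and qualifier, regardless of timestamp or value. The class name and cell contents below are made up for illustration.

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class MatchingRowColumnSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");

    // Two versions of the same row/family/qualifier, differing only in timestamp and value.
    Cell a = new KeyValue(row, family, qualifier, 1L, Bytes.toBytes("v1"));
    Cell b = new KeyValue(row, family, qualifier, 2L, Bytes.toBytes("v2"));
    // Same row and family, but a different qualifier.
    Cell c = new KeyValue(row, family, Bytes.toBytes("other"), 1L, Bytes.toBytes("v3"));

    System.out.println(CellUtil.matchingRowColumn(a, b)); // true: row and column match
    System.out.println(CellUtil.matchingRowColumn(a, c)); // false: qualifier differs
  }
}
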
Example 1
Source File: RowColBloomContext.java    From hbase with Apache License 2.0
@Override
protected boolean isNewKey(Cell cell) {
  if (this.getLastCell() != null) {
    return !CellUtil.matchingRowColumn(cell, this.getLastCell());
  }
  return true;
}
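In this bloom context, cells are appended to the store file in sorted order, so comparing the incoming cell against only the most recently written cell is enough to decide whether a new ROWCOL Bloom filter key starts; the very first cell (when getLastCell() is still null) always counts as a new key.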
 
Example 2
Source File: StoreScanner.java    From hbase with Apache License 2.0
/**
 * See {@link org.apache.hadoop.hbase.regionserver.StoreScanner#trySkipToNextRow(Cell)}
 * @param cell current cell
 * @return true if we successfully skipped to the next column, false otherwise
 */
@VisibleForTesting
protected boolean trySkipToNextColumn(Cell cell) throws IOException {
  Cell nextCell = null;
  // used to guard against a changed next indexed key by doing an identity comparison
  // when the identity changes we need to compare the bytes again
  Cell previousIndexedKey = null;
  do {
    Cell nextIndexedKey = getNextIndexedKey();
    if (nextIndexedKey != null && nextIndexedKey != KeyValueScanner.NO_NEXT_INDEXED_KEY &&
        (nextIndexedKey == previousIndexedKey ||
        matcher.compareKeyForNextColumn(nextIndexedKey, cell) >= 0)) {
      this.heap.next();
      ++kvsScanned;
      previousIndexedKey = nextIndexedKey;
    } else {
      return false;
    }
  } while ((nextCell = this.heap.peek()) != null && CellUtil.matchingRowColumn(cell, nextCell));
  // We need this check because the new scanner obtained during heap.next() may require a
  // reseek due to a fake KV previously generated for the ROWCOL bloom filter optimization.
  // See HBASE-19863 for more details.
  if (nextCell != null && matcher.compareKeyForNextColumn(nextCell, cell) < 0) {
    return false;
  }
  return true;
}
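Stripped of the StoreScanner machinery (next indexed keys, heap reseeks, the HBASE-19863 guard), the core pattern above is: keep advancing while the next cell still matches the row and column of the current cell. The sketch below shows that pattern over a plain sorted list; the class name and data are made up for illustration.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class SkipToNextColumnSketch {
  // Returns the index of the first cell that belongs to a different row/column
  // than the cell at 'pos' (or the list size if there is none).
  static int skipToNextColumn(List<Cell> sortedCells, int pos) {
    Cell current = sortedCells.get(pos);
    int next = pos + 1;
    while (next < sortedCells.size()
        && CellUtil.matchingRowColumn(current, sortedCells.get(next))) {
      next++; // still another version of the same row + column, keep skipping
    }
    return next;
  }

  public static void main(String[] args) {
    byte[] r = Bytes.toBytes("row1");
    byte[] f = Bytes.toBytes("f");
    List<Cell> cells = Arrays.<Cell>asList(
        new KeyValue(r, f, Bytes.toBytes("q1"), 2L, Bytes.toBytes("v2")), // newer version
        new KeyValue(r, f, Bytes.toBytes("q1"), 1L, Bytes.toBytes("v1")), // older version
        new KeyValue(r, f, Bytes.toBytes("q2"), 1L, Bytes.toBytes("v")));
    System.out.println(skipToNextColumn(cells, 0)); // prints 2: index of the q2 cell
  }
}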
 
Example 3
Source File: CellArrayImmutableSegment.java    From hbase with Apache License 2.0
private void reinitializeCellSet(
    int numOfCells, KeyValueScanner segmentScanner, CellSet oldCellSet,
    MemStoreCompactionStrategy.Action action) {
  Cell[] cells = new Cell[numOfCells];   // build the Cell Array
  Cell curCell;
  int idx = 0;
  int numUniqueKeys = 0;
  Cell prev = null;
  try {
    while ((curCell = segmentScanner.next()) != null) {
      cells[idx++] = curCell;
      if (action == MemStoreCompactionStrategy.Action.FLATTEN_COUNT_UNIQUE_KEYS) {
        // counting number of unique keys
        if (prev != null) {
          if (!CellUtil.matchingRowColumn(prev, curCell)) {
            numUniqueKeys++;
          }
        } else {
          numUniqueKeys++;
        }
      }
      prev = curCell;
    }
  } catch (IOException ie) {
    throw new IllegalStateException(ie);
  } finally {
    segmentScanner.close();
  }
  if (action != MemStoreCompactionStrategy.Action.FLATTEN_COUNT_UNIQUE_KEYS) {
    numUniqueKeys = CellSet.UNKNOWN_NUM_UNIQUES;
  }
  // build the immutable CellSet
  CellArrayMap cam = new CellArrayMap(getComparator(), cells, 0, idx, false);
  // update the CellSet of this Segment
  this.setCellSet(oldCellSet, new CellSet(cam, numUniqueKeys));
}
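The unique-key count works because segmentScanner returns cells in sorted order: the first cell always starts a new unique key, and each later cell starts one only when its row, family or qualifier differs from the immediately preceding cell. When the compaction action is not FLATTEN_COUNT_UNIQUE_KEYS, the count is not tracked and is reported as CellSet.UNKNOWN_NUM_UNIQUES.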
 
Example 4
Source File: TestCompoundBloomFilter.java    From hbase with Apache License 2.0
private Path writeStoreFile(int t, BloomType bt, List<KeyValue> kvs)
    throws IOException {
  conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE,
      BLOOM_BLOCK_SIZES[t]);
  conf.setBoolean(CacheConfig.CACHE_BLOCKS_ON_WRITE_KEY, true);
  cacheConf = new CacheConfig(conf, blockCache);
  HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCK_SIZES[t]).build();
  StoreFileWriter w = new StoreFileWriter.Builder(conf, cacheConf, fs)
          .withOutputDir(TEST_UTIL.getDataTestDir())
          .withBloomType(bt)
          .withFileContext(meta)
          .build();

  assertTrue(w.hasGeneralBloom());
  assertTrue(w.getGeneralBloomWriter() instanceof CompoundBloomFilterWriter);
  CompoundBloomFilterWriter cbbf =
      (CompoundBloomFilterWriter) w.getGeneralBloomWriter();

  int keyCount = 0;
  KeyValue prev = null;
  LOG.debug("Total keys/values to insert: " + kvs.size());
  for (KeyValue kv : kvs) {
    w.append(kv);

    // Validate the key count in the Bloom filter.
    boolean newKey = true;
    if (prev != null) {
      newKey = !(bt == BloomType.ROW
          ? CellUtil.matchingRows(kv, prev)
          : CellUtil.matchingRowColumn(kv, prev));
    }
    if (newKey) {
      ++keyCount;
    }
    assertEquals(keyCount, cbbf.getKeyCount());

    prev = kv;
  }
  w.close();

  return w.getPath();
}
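The test picks the comparison to match the Bloom filter type: matchingRows for ROW Blooms (row only) and matchingRowColumn for ROWCOL Blooms (row plus column). A minimal sketch of the difference, with made-up cell contents:

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class RowVsRowColSketch {
  public static void main(String[] args) {
    byte[] row = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("f");
    Cell a = new KeyValue(row, family, Bytes.toBytes("q1"), 1L, Bytes.toBytes("v1"));
    Cell b = new KeyValue(row, family, Bytes.toBytes("q2"), 1L, Bytes.toBytes("v2"));

    // Same row, different qualifier: a ROW Bloom filter treats these as one key,
    // a ROWCOL Bloom filter as two.
    System.out.println(CellUtil.matchingRows(a, b));      // true
    System.out.println(CellUtil.matchingRowColumn(a, b)); // false
  }
}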
 
Example 5
Source File: CellChunkImmutableSegment.java    From hbase with Apache License 2.0
private void reinitializeCellSet(
    int numOfCells, KeyValueScanner segmentScanner, CellSet oldCellSet,
    MemStoreSizing memstoreSizing, MemStoreCompactionStrategy.Action action) {
  Cell curCell;
  Chunk[] chunks = allocIndexChunks(numOfCells);

  int currentChunkIdx = 0;
  int offsetInCurrentChunk = ChunkCreator.SIZEOF_CHUNK_HEADER;

  int numUniqueKeys = 0;
  Cell prev = null;
  try {
    while ((curCell = segmentScanner.next()) != null) {
      assert(curCell instanceof ExtendedCell);
      if (((ExtendedCell)curCell).getChunkId() == ExtendedCell.CELL_NOT_BASED_ON_CHUNK) {
        // CellChunkMap assumes all cells are allocated on MSLAB.
        // Therefore, cells that were not initially allocated on MSLAB
        // are copied into MSLAB here.
        curCell = copyCellIntoMSLAB(curCell, memstoreSizing);
      }
      if (offsetInCurrentChunk + ClassSize.CELL_CHUNK_MAP_ENTRY > chunks[currentChunkIdx].size) {
        // continue to the next metadata chunk
        currentChunkIdx++;
        offsetInCurrentChunk = ChunkCreator.SIZEOF_CHUNK_HEADER;
      }
      offsetInCurrentChunk =
          createCellReference((ByteBufferKeyValue) curCell, chunks[currentChunkIdx].getData(),
              offsetInCurrentChunk);
      if (action == MemStoreCompactionStrategy.Action.FLATTEN_COUNT_UNIQUE_KEYS) {
        // counting number of unique keys
        if (prev != null) {
          if (!CellUtil.matchingRowColumn(prev, curCell)) {
            numUniqueKeys++;
          }
        } else {
          numUniqueKeys++;
        }
      }
      prev = curCell;
    }
    if (action != MemStoreCompactionStrategy.Action.FLATTEN_COUNT_UNIQUE_KEYS) {
      numUniqueKeys = CellSet.UNKNOWN_NUM_UNIQUES;
    }
  } catch (IOException ie) {
    throw new IllegalStateException(ie);
  } finally {
    segmentScanner.close();
  }

  CellChunkMap ccm = new CellChunkMap(getComparator(), chunks, 0, numOfCells, false);
  // update the CellSet of this Segment
  this.setCellSet(oldCellSet, new CellSet(ccm, numUniqueKeys));
}
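This variant counts unique keys the same way as Example 3; the extra work is that CellChunkMap can only reference cells backed by MSLAB chunks, so any cell whose chunk id is CELL_NOT_BASED_ON_CHUNK is first copied into MSLAB before its reference is written into the index chunk.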