Java Code Examples for org.apache.hadoop.hbase.CellComparatorImpl#COMPARATOR

The following examples show how to use org.apache.hadoop.hbase.CellComparatorImpl#COMPARATOR. Each example is taken from an open-source project; the source file and license are noted above each listing.
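Before the examples, a minimal standalone sketch of what the constant provides (the class name ComparatorSketch is hypothetical, not from any project below): CellComparatorImpl.COMPARATOR is the singleton comparator for all tables except hbase:meta, ordering cells by row, then column family, qualifier, timestamp (newest first), and type.

import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.Bytes;

public class ComparatorSketch {
  public static void main(String[] args) {
    CellComparator comparator = CellComparatorImpl.COMPARATOR;
    KeyValue a = new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    KeyValue b = new KeyValue(Bytes.toBytes("row2"), Bytes.toBytes("f"),
        Bytes.toBytes("q"), Bytes.toBytes("v"));
    // Negative because "row1" sorts before "row2".
    System.out.println(comparator.compare(a, b));
  }
}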
Example 1
Source File: TestCompactingToCellFlatMapMemStore.java (from hbase, Apache License 2.0)
@Test
public void testCountOfCellsAfterFlatteningByIterator() throws IOException {
  String[] keys1 = { "A", "B", "C" }; // A, B, C
  addRowsByKeysWith50Cols(memstore, keys1);
  // this should only flatten as there are no duplicates
  ((CompactingMemStore) memstore).flushInMemory();
  while (((CompactingMemStore) memstore).isMemStoreFlushingInMemory()) {
    Threads.sleep(10);
  }
  // Count the cells by draining the merging iterator
  MemStoreSegmentsIterator itr = new MemStoreMergerSegmentsIterator(
      ((CompactingMemStore) memstore).getImmutableSegments().getStoreSegments(),
      CellComparatorImpl.COMPARATOR, 10);
  int cnt = 0;
  try {
    while (itr.next() != null) {
      cnt++;
    }
  } finally {
    itr.close();
  }
  assertEquals("the count should be ", 150, cnt);
}
 
Example 2
Source File: FixedFileTrailer.java (from hbase, Apache License 2.0)
static CellComparator createComparator(String comparatorClassName) throws IOException {
  if (comparatorClassName.equals(CellComparatorImpl.COMPARATOR.getClass().getName())) {
    return CellComparatorImpl.COMPARATOR;
  } else if (comparatorClassName.equals(
    CellComparatorImpl.META_COMPARATOR.getClass().getName())) {
    return CellComparatorImpl.META_COMPARATOR;
  }
  try {
    Class<? extends CellComparator> comparatorClass = getComparatorClass(comparatorClassName);
    if (comparatorClass != null) {
      return comparatorClass.getDeclaredConstructor().newInstance();
    }
    LOG.warn("No Comparator class for " + comparatorClassName + ". Returning Null.");
    return null;
  } catch (Exception e) {
    throw new IOException("Comparator class " + comparatorClassName + " is not instantiable", e);
  }
}
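A hedged usage sketch for the method above (assuming the call site has package-level access, since the method is not public): the class name passed in is what FixedFileTrailer serializes into an HFile, and the two well-known comparators resolve back to their singletons rather than to fresh instances.

CellComparator c1 = createComparator(CellComparatorImpl.COMPARATOR.getClass().getName());
// c1 == CellComparatorImpl.COMPARATOR: the singleton is returned directly.
CellComparator c2 = createComparator(CellComparatorImpl.META_COMPARATOR.getClass().getName());
// c2 == CellComparatorImpl.META_COMPARATOR, used for the hbase:meta table.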
 
Example 3
Source File: TestDefaultMemStore.java (from hbase, Apache License 2.0)
@Test
public void testMultipleVersionsSimple() throws Exception {
  DefaultMemStore m = new DefaultMemStore(new Configuration(), CellComparatorImpl.COMPARATOR);
  byte [] row = Bytes.toBytes("testRow");
  byte [] family = Bytes.toBytes("testFamily");
  byte [] qf = Bytes.toBytes("testQualifier");
  long [] stamps = {1,2,3};
  byte [][] values = {Bytes.toBytes("value0"), Bytes.toBytes("value1"),
      Bytes.toBytes("value2")};
  KeyValue key0 = new KeyValue(row, family, qf, stamps[0], values[0]);
  KeyValue key1 = new KeyValue(row, family, qf, stamps[1], values[1]);
  KeyValue key2 = new KeyValue(row, family, qf, stamps[2], values[2]);

  m.add(key0, null);
  m.add(key1, null);
  m.add(key2, null);

  assertTrue("Expected memstore to hold 3 values, actually has " +
      m.getActive().getCellsCount(), m.getActive().getCellsCount() == 3);
}
 
Example 4
Source File: TestKeyValueHeap.java (from hbase, Apache License 2.0)
@Test
public void testSeek() throws IOException {
  // Cases:
  // 1. Seek Cell that is not in scanner
  // 2. Check that smallest that is returned from a seek is correct
  List<Cell> expected = Arrays.asList(kv211);

  // Creating KeyValueHeap
  try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
    Cell seekKv = new KeyValue(row2, fam1, null, null);
    kvh.seek(seekKv);

    List<Cell> actual = Arrays.asList(kvh.peek());

    assertEquals("Expected = " + Arrays.toString(expected.toArray()) + "\n Actual = " +
      Arrays.toString(actual.toArray()), expected, actual);
  }
}
 
Example 5
Source File: TestScanWildcardColumnTracker.java (from hbase, Apache License 2.0)
@Test
public void DisabledTestCheckColumnWrongOrder() {
  ScanWildcardColumnTracker tracker = new ScanWildcardColumnTracker(
      0, VERSIONS, Long.MIN_VALUE, CellComparatorImpl.COMPARATOR);

  // Create list of qualifiers
  List<byte[]> qualifiers = new ArrayList<>(2);
  qualifiers.add(Bytes.toBytes("qualifier2"));
  qualifiers.add(Bytes.toBytes("qualifier1"));

  try {
    for (byte[] qualifier : qualifiers) {
      ScanQueryMatcher.checkColumn(tracker, qualifier, 0, qualifier.length, 1,
        KeyValue.Type.Put.getCode(), false);
    }
    fail("expected an IOException for out-of-order qualifiers");
  } catch (IOException e) {
    // expected
  }
}
 
Example 6
Source File: BloomFilterFactory.java (from hbase, Apache License 2.0)
/**
 * Creates a new general (Row or RowCol) Bloom filter at the time of
 * {@link org.apache.hadoop.hbase.regionserver.HStoreFile} writing.
 *
 * @param conf configuration from which Bloom filter settings are read
 * @param cacheConf cache configuration; decides whether Bloom blocks are
 *        cached on write
 * @param bloomType the Bloom filter type configured for the column family
 * @param maxKeys an estimate of the number of keys we expect to insert.
 *        Irrelevant if compound Bloom filters are enabled.
 * @param writer the HFile writer
 * @return the new Bloom filter, or null if Bloom filters are disabled or
 *         one could not be created.
 */
public static BloomFilterWriter createGeneralBloomAtWrite(Configuration conf,
    CacheConfig cacheConf, BloomType bloomType, int maxKeys,
    HFile.Writer writer) {
  if (!isGeneralBloomEnabled(conf)) {
    LOG.trace("Bloom filters are disabled by configuration for "
        + writer.getPath()
        + (conf == null ? " (configuration is null)" : ""));
    return null;
  } else if (bloomType == BloomType.NONE) {
    LOG.trace("Bloom filter is turned off for the column family");
    return null;
  }

  float err = getErrorRate(conf);

  // In case of row/column Bloom filter lookups, each lookup is an OR of two
  // separate lookups. Therefore, if each lookup's false positive rate is p,
  // the resulting false positive rate is err = 1 - (1 - p)^2, and
  // p = 1 - sqrt(1 - err).
  if (bloomType == BloomType.ROWCOL) {
    err = (float) (1 - Math.sqrt(1 - err));
  }

  int maxFold = conf.getInt(IO_STOREFILE_BLOOM_MAX_FOLD,
      MAX_ALLOWED_FOLD_FACTOR);

  // Do we support compound bloom filters?
  // In case of compound Bloom filters we ignore the maxKeys hint.
  CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
      err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
      bloomType == BloomType.ROWCOL ? CellComparatorImpl.COMPARATOR : null, bloomType);
  writer.addInlineBlockWriter(bloomWriter);
  return bloomWriter;
}
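As a quick check of the error-rate math in the comment above: a ROWCOL lookup ORs two independent probes, so each probe must use p = 1 - sqrt(1 - err). For a configured err of 1%, p = 1 - sqrt(0.99) ≈ 0.00501, roughly half the overall budget. A minimal sketch (the helper name is hypothetical, not HBase API):

// Hypothetical helper mirroring the inline adjustment above; not HBase API.
static float adjustErrForRowCol(float err) {
  // err = 1 - (1 - p)^2  =>  p = 1 - sqrt(1 - err)
  return (float) (1 - Math.sqrt(1 - err));
}
// adjustErrForRowCol(0.01f) ≈ 0.00501f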
 
Example 7
Source File: TestCompactingToCellFlatMapMemStore.java (from hbase, Apache License 2.0)
@Override
public void setUp() throws Exception {
  compactingSetUp();
  this.conf = HBaseConfiguration.create();

  // set memstore to do data compaction
  conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY,
      String.valueOf(MemoryCompactionPolicy.EAGER));
  conf.setDouble(CompactingMemStore.IN_MEMORY_FLUSH_THRESHOLD_FACTOR_KEY, 0.02);
  this.memstore =
      new MyCompactingMemStore(conf, CellComparatorImpl.COMPARATOR, store,
          regionServicesForStores, MemoryCompactionPolicy.EAGER);
}
 
Example 8
Source File: TestStripeStoreFileManager.java (from hbase, Apache License 2.0)
private static StripeStoreFileManager createManager(
    ArrayList<HStoreFile> sfs, Configuration conf) throws Exception {
  StripeStoreConfig config = new StripeStoreConfig(
      conf, Mockito.mock(StoreConfigInformation.class));
  StripeStoreFileManager result = new StripeStoreFileManager(CellComparatorImpl.COMPARATOR, conf,
      config);
  result.loadFiles(sfs);
  return result;
}
 
Example 9
Source File: TestScanWildcardColumnTracker.java (from hbase, Apache License 2.0)
@Test
public void testCheckColumnEnforceVersions() throws IOException {
  ScanWildcardColumnTracker tracker = new ScanWildcardColumnTracker(
      0, VERSIONS, Long.MIN_VALUE, CellComparatorImpl.COMPARATOR);

  // Create list of qualifiers
  List<byte[]> qualifiers = new ArrayList<>(4);
  qualifiers.add(Bytes.toBytes("qualifier1"));
  qualifiers.add(Bytes.toBytes("qualifier1"));
  qualifiers.add(Bytes.toBytes("qualifier1"));
  qualifiers.add(Bytes.toBytes("qualifier2"));

  // Setting up expected result
  List<ScanQueryMatcher.MatchCode> expected = new ArrayList<>(4);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.SEEK_NEXT_COL);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);

  List<MatchCode> actual = new ArrayList<>(qualifiers.size());

  long timestamp = 0;
  for (byte[] qualifier : qualifiers) {
    MatchCode mc = ScanQueryMatcher.checkColumn(tracker, qualifier, 0, qualifier.length,
      ++timestamp, KeyValue.Type.Put.getCode(), false);
    actual.add(mc);
  }

  // Compare actual with expected
  for (int i = 0; i < expected.size(); i++) {
    assertEquals(expected.get(i), actual.get(i));
  }
}
 
Example 10
Source File: TestScanWildcardColumnTracker.java (from hbase, Apache License 2.0)
@Test
public void testCheckColumnOk() throws IOException {
  ScanWildcardColumnTracker tracker = new ScanWildcardColumnTracker(
      0, VERSIONS, Long.MIN_VALUE, CellComparatorImpl.COMPARATOR);

  // Create list of qualifiers
  List<byte[]> qualifiers = new ArrayList<>(4);
  qualifiers.add(Bytes.toBytes("qualifier1"));
  qualifiers.add(Bytes.toBytes("qualifier2"));
  qualifiers.add(Bytes.toBytes("qualifier3"));
  qualifiers.add(Bytes.toBytes("qualifier4"));

  // Setting up expected result
  List<MatchCode> expected = new ArrayList<>(4);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);
  expected.add(ScanQueryMatcher.MatchCode.INCLUDE);

  List<ScanQueryMatcher.MatchCode> actual = new ArrayList<>(qualifiers.size());

  for (byte[] qualifier : qualifiers) {
    ScanQueryMatcher.MatchCode mc = ScanQueryMatcher.checkColumn(tracker, qualifier, 0,
      qualifier.length, 1, KeyValue.Type.Put.getCode(), false);
    actual.add(mc);
  }

  // Compare actual with expected
  for (int i = 0; i < expected.size(); i++) {
    assertEquals(expected.get(i), actual.get(i));
  }
}
 
Example 11
Source File: TestKeyValueHeap.java (from hbase, Apache License 2.0)
@Test
public void testScannerLeak() throws IOException {
  // Test for unclosed scanners (HBASE-1927)

  TestScanner s4 = new TestScanner(new ArrayList<>());
  scanners.add(s4);

  // Creating KeyValueHeap
  try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
    for (;;) {
      if (kvh.next() == null) {
        break;
      }
    }
    // Once the internal scanners go out of Cells, those will be removed from KVHeap's priority
    // queue and added to a Set for lazy close. The actual close will happen only on
    // KVHeap#close()
    assertEquals(4, kvh.scannersForDelayedClose.size());
    assertTrue(kvh.scannersForDelayedClose.contains(s1));
    assertTrue(kvh.scannersForDelayedClose.contains(s2));
    assertTrue(kvh.scannersForDelayedClose.contains(s3));
    assertTrue(kvh.scannersForDelayedClose.contains(s4));
  }

  for (KeyValueScanner scanner : scanners) {
    assertTrue(((TestScanner) scanner).isClosed());
  }
}
 
Example 12
Source File: TestKeyValueHeap.java (from hbase, Apache License 2.0)
public List<Cell> assertCells(List<Cell> expected, List<KeyValueScanner> scanners)
  throws IOException {
  // Creating KeyValueHeap
  try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
    List<Cell> actual = new ArrayList<>();
    while (kvh.peek() != null) {
      actual.add(kvh.next());
    }

    assertEquals(expected, actual);
    return actual;
  }
}
 
Example 13
Source File: TestDefaultMemStore.java (from hbase, Apache License 2.0)
/**
 * Adds KeyValues with a fixed memstoreTs and checks that the memstore size
 * is decreased as older KeyValues are deleted from the memstore.
 * @throws Exception
 */
@Test
public void testUpsertMemstoreSize() throws Exception {
  Configuration conf = HBaseConfiguration.create();
  memstore = new DefaultMemStore(conf, CellComparatorImpl.COMPARATOR);
  MemStoreSize oldSize = memstore.size();

  List<Cell> l = new ArrayList<>();
  KeyValue kv1 = KeyValueTestUtil.create("r", "f", "q", 100, "v");
  KeyValue kv2 = KeyValueTestUtil.create("r", "f", "q", 101, "v");
  KeyValue kv3 = KeyValueTestUtil.create("r", "f", "q", 102, "v");

  kv1.setSequenceId(1);
  kv2.setSequenceId(1);
  kv3.setSequenceId(1);
  l.add(kv1);
  l.add(kv2);
  l.add(kv3);

  this.memstore.upsert(l, 2, null); // readpoint is 2
  MemStoreSize newSize = this.memstore.size();
  assert (newSize.getDataSize() > oldSize.getDataSize());
  // kv1 should be removed.
  assert (memstore.getActive().getCellsCount() == 2);

  KeyValue kv4 = KeyValueTestUtil.create("r", "f", "q", 104, "v");
  kv4.setSequenceId(1);
  l.clear();
  l.add(kv4);
  this.memstore.upsert(l, 3, null);
  assertEquals(newSize, this.memstore.size());
  // kv2 should be removed.
  assert (memstore.getActive().getCellsCount() == 2);
}
 
Example 14
Source File: TestScanDeleteTracker.java (from hbase, Apache License 2.0)
@Before
public void setUp() throws Exception {
  sdt = new ScanDeleteTracker(CellComparatorImpl.COMPARATOR);
}
 
Example 15
Source File: HBaseShims.java (from phoenix-omid, Apache License 2.0)
public static CellComparator cellComparatorInstance() {
    return CellComparatorImpl.COMPARATOR;
}
 
Example 16
Source File: TestReversibleScanners.java (from hbase, Apache License 2.0)
@Test
public void testReversibleStoreScanner() throws IOException {
  // write data to one memstore and two store files
  FileSystem fs = TEST_UTIL.getTestFileSystem();
  Path hfilePath = new Path(new Path(
      TEST_UTIL.getDataTestDir("testReversibleStoreScanner"), "regionname"),
      "familyname");
  CacheConfig cacheConf = new CacheConfig(TEST_UTIL.getConfiguration());
  HFileContextBuilder hcBuilder = new HFileContextBuilder();
  hcBuilder.withBlockSize(2 * 1024);
  HFileContext hFileContext = hcBuilder.build();
  StoreFileWriter writer1 = new StoreFileWriter.Builder(
      TEST_UTIL.getConfiguration(), cacheConf, fs).withOutputDir(
      hfilePath).withFileContext(hFileContext).build();
  StoreFileWriter writer2 = new StoreFileWriter.Builder(
      TEST_UTIL.getConfiguration(), cacheConf, fs).withOutputDir(
      hfilePath).withFileContext(hFileContext).build();

  MemStore memstore = new DefaultMemStore();
  writeMemstoreAndStoreFiles(memstore, new StoreFileWriter[] { writer1,
      writer2 });

  HStoreFile sf1 = new HStoreFile(fs, writer1.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
      BloomType.NONE, true);

  HStoreFile sf2 = new HStoreFile(fs, writer2.getPath(), TEST_UTIL.getConfiguration(), cacheConf,
      BloomType.NONE, true);

  ScanInfo scanInfo =
      new ScanInfo(TEST_UTIL.getConfiguration(), FAMILYNAME, 0, Integer.MAX_VALUE, Long.MAX_VALUE,
          KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparatorImpl.COMPARATOR, false);

  // Case 1. Test a full reversed scan
  Scan scan = new Scan();
  scan.setReversed(true);
  StoreScanner storeScanner =
      getReversibleStoreScanner(memstore, sf1, sf2, scan, scanInfo, MAXMVCC);
  verifyCountAndOrder(storeScanner, QUALSIZE * ROWSIZE, ROWSIZE, false);

  // Case 2. Test reversed scan with a specified start row
  int startRowNum = ROWSIZE / 2;
  byte[] startRow = ROWS[startRowNum];
  scan.withStartRow(startRow);
  storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanInfo, MAXMVCC);
  verifyCountAndOrder(storeScanner, QUALSIZE * (startRowNum + 1),
      startRowNum + 1, false);

  // Case 3. Test reversed scan with a specified start row and specified
  // qualifiers
  assertTrue(QUALSIZE > 2);
  scan.addColumn(FAMILYNAME, QUALS[0]);
  scan.addColumn(FAMILYNAME, QUALS[2]);
  storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanInfo, MAXMVCC);
  verifyCountAndOrder(storeScanner, 2 * (startRowNum + 1), startRowNum + 1,
      false);

  // Case 4. Test reversed scan with MVCC, based on case 3
  for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
    LOG.info("Setting read point to " + readPoint);
    storeScanner = getReversibleStoreScanner(memstore, sf1, sf2, scan, scanInfo, readPoint);
    int expectedRowCount = 0;
    int expectedKVCount = 0;
    for (int i = startRowNum; i >= 0; i--) {
      int kvCount = 0;
      if (makeMVCC(i, 0) <= readPoint) {
        kvCount++;
      }
      if (makeMVCC(i, 2) <= readPoint) {
        kvCount++;
      }
      if (kvCount > 0) {
        expectedRowCount++;
        expectedKVCount += kvCount;
      }
    }
    verifyCountAndOrder(storeScanner, expectedKVCount, expectedRowCount,
        false);
  }
}
 
Example 17
Source File: TestReversibleScanners.java (from hbase, Apache License 2.0)
private ReversedKeyValueHeap getReversibleKeyValueHeap(MemStore memstore, HStoreFile sf1,
    HStoreFile sf2, byte[] startRow, int readPoint) throws IOException {
  List<KeyValueScanner> scanners = getScanners(memstore, sf1, sf2, startRow, true, readPoint);
  ReversedKeyValueHeap kvHeap = new ReversedKeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);
  return kvHeap;
}
 
Example 18
Source File: TestHFileBlockIndex.java (from hbase, Apache License 2.0)
private void readIndex(boolean useTags) throws IOException {
  long fileSize = fs.getFileStatus(path).getLen();
  LOG.info("Size of {}: {} compression={}", path, fileSize, compr.toString());

  FSDataInputStream istream = fs.open(path);
  HFileContext meta = new HFileContextBuilder()
                      .withHBaseCheckSum(true)
                      .withIncludesMvcc(includesMemstoreTS)
                      .withIncludesTags(useTags)
                      .withCompression(compr)
                      .build();
  ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, path).build();
  HFileBlock.FSReader blockReader = new HFileBlock.FSReaderImpl(context, meta,
      ByteBuffAllocator.HEAP);

  BlockReaderWrapper brw = new BlockReaderWrapper(blockReader);
  HFileBlockIndex.BlockIndexReader indexReader =
      new HFileBlockIndex.CellBasedKeyBlockIndexReader(
          CellComparatorImpl.COMPARATOR, numLevels);

  indexReader.readRootIndex(blockReader.blockRange(rootIndexOffset,
      fileSize).nextBlockWithBlockType(BlockType.ROOT_INDEX), numRootEntries);

  long prevOffset = -1;
  int i = 0;
  int expectedHitCount = 0;
  int expectedMissCount = 0;
  LOG.info("Total number of keys: " + keys.size());
  for (byte[] key : keys) {
    assertTrue(key != null);
    assertTrue(indexReader != null);
    KeyValue.KeyOnlyKeyValue keyOnlyKey = new KeyValue.KeyOnlyKeyValue(key, 0, key.length);
    HFileBlock b =
        indexReader.seekToDataBlock(keyOnlyKey, null, true,
          true, false, null, brw);
    if (PrivateCellUtil.compare(CellComparatorImpl.COMPARATOR, keyOnlyKey, firstKeyInFile, 0,
      firstKeyInFile.length) < 0) {
      assertTrue(b == null);
      ++i;
      continue;
    }

    String keyStr = "key #" + i + ", " + Bytes.toStringBinary(key);

    assertTrue("seekToDataBlock failed for " + keyStr, b != null);

    if (prevOffset == b.getOffset()) {
      assertEquals(++expectedHitCount, brw.hitCount);
    } else {
      LOG.info("First key in a new block: " + keyStr + ", block offset: "
          + b.getOffset() + ")");
      assertTrue(b.getOffset() > prevOffset);
      assertEquals(++expectedMissCount, brw.missCount);
      prevOffset = b.getOffset();
    }
    ++i;
  }

  istream.close();
}
 
Example 19
Source File: GenericKeyValueBuilder.java (from phoenix, Apache License 2.0)
@Override
public CellComparator getKeyValueComparator() {
    return CellComparatorImpl.COMPARATOR;
}