Java Code Examples for org.apache.hadoop.hbase.io.encoding.DataBlockEncoding#values()

The following examples show how to use org.apache.hadoop.hbase.io.encoding.DataBlockEncoding#values(). You can vote up the examples you find useful or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: HBaseTestingUtility.java    From hbase with Apache License 2.0 6 votes vote down vote up
/**
 * Create a set of column descriptors with the combination of compression,
 * encoding, bloom codecs available.
 * @param prefix family names prefix
 * @return the list of column descriptors
 */
/**
 * Builds one column family descriptor for every combination of supported
 * compression algorithm, data block encoding, and bloom filter type.
 * @param prefix family names prefix
 * @return the list of column descriptors
 */
public static List<ColumnFamilyDescriptor> generateColumnDescriptors(final String prefix) {
  List<ColumnFamilyDescriptor> descriptors = new ArrayList<>();
  long id = 0;
  for (Compression.Algorithm compression : getSupportedCompressionAlgorithms()) {
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      for (BloomType bloom : BloomType.values()) {
        // Family name embeds a unique id plus special characters to exercise naming.
        String familyName = String.format("%s-cf-!@#&-%d!@#", prefix, id++);
        descriptors.add(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(familyName))
            .setCompressionType(compression)
            .setDataBlockEncoding(encoding)
            .setBloomFilterType(bloom)
            .build());
      }
    }
  }
  return descriptors;
}
 
Example 2
Source File: TestReversibleScanners.java    From hbase with Apache License 2.0 5 votes vote down vote up
@Test
public void testReversibleStoreFileScanner() throws IOException {
  // Exercises reversible (backward) scanning of a store file under every
  // available data block encoding.
  FileSystem fileSystem = TEST_UTIL.getTestFileSystem();
  Path familyDir = new Path(
      new Path(TEST_UTIL.getDataTestDir("testReversibleStoreFileScanner"), "regionname"),
      "familyname");
  CacheConfig cacheConfig = new CacheConfig(TEST_UTIL.getConfiguration());
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    // Write a fresh store file using the current encoding.
    HFileContext fileContext = new HFileContextBuilder()
        .withBlockSize(2 * 1024)
        .withDataBlockEncoding(encoding)
        .build();
    StoreFileWriter writer =
        new StoreFileWriter.Builder(TEST_UTIL.getConfiguration(), cacheConfig, fileSystem)
            .withOutputDir(familyDir)
            .withFileContext(fileContext)
            .build();
    writeStoreFile(writer);

    HStoreFile storeFile = new HStoreFile(fileSystem, writer.getPath(),
        TEST_UTIL.getConfiguration(), cacheConfig, BloomType.NONE, true);

    // First check seeking with an unbounded read point.
    List<StoreFileScanner> scanners = StoreFileScanner.getScannersForStoreFiles(
        Collections.singletonList(storeFile), false, true, false, false, Long.MAX_VALUE);
    seekTestOfReversibleKeyValueScanner(scanners.get(0));

    // Then re-open the scanners at each MVCC read point and re-run the seek test.
    for (int readPoint = 0; readPoint < MAXMVCC; readPoint++) {
      LOG.info("Setting read point to " + readPoint);
      scanners = StoreFileScanner.getScannersForStoreFiles(
          Collections.singletonList(storeFile), false, true, false, false, readPoint);
      seekTestOfReversibleKeyValueScannerWithMVCC(scanners, readPoint);
    }
  }

}
 
Example 3
Source File: TestHFileWriterV3WithDataEncoders.java    From hbase with Apache License 2.0 5 votes vote down vote up
@Parameterized.Parameters
public static Collection<Object[]> parameters() {
  DataBlockEncoding[] encodings = DataBlockEncoding.values();
  // Two parameter sets (flag false/true) per encoding, except NONE which is
  // skipped entirely — hence the "* 2 - 2" sizing.
  Object[][] params = new Object[encodings.length * 2 - 2][];
  int index = 0;
  for (DataBlockEncoding encoding : encodings) {
    if (encoding == DataBlockEncoding.NONE) {
      continue;
    }
    params[index] = new Object[] { false, encoding };
    params[index + 1] = new Object[] { true, encoding };
    index += 2;
  }
  return Arrays.asList(params);
}
 
Example 4
Source File: TestSeekTo.java    From hbase with Apache License 2.0 5 votes vote down vote up
@Parameters
public static Collection<Object[]> parameters() {
  // One single-element parameter set per available data block encoding.
  List<Object[]> encodingParams = new ArrayList<>();
  for (DataBlockEncoding dataBlockEncoding : DataBlockEncoding.values()) {
    encodingParams.add(new Object[] { dataBlockEncoding });
  }
  return encodingParams;
}
 
Example 5
Source File: TestHFileDataBlockEncoder.java    From hbase with Apache License 2.0 5 votes vote down vote up
/**
 * @return All possible data block encoding configurations
 */
@Parameters
public static Collection<Object[]> getAllConfigurations() {
  List<Object[]> configurations = new ArrayList<>();

  // Cross product of every encoding with includesMemstoreTS off/on.
  for (DataBlockEncoding diskAlgo : DataBlockEncoding.values()) {
    for (boolean includesMemstoreTS : new boolean[] { false, true }) {
      // NONE is mapped to the no-op encoder; every other algorithm gets a
      // real encoder implementation wrapping it.
      HFileDataBlockEncoder dbe = (diskAlgo == DataBlockEncoding.NONE) ?
          NoOpDataBlockEncoder.INSTANCE : new HFileDataBlockEncoderImpl(diskAlgo);
      // Boolean.valueOf replaces the deprecated Boolean(boolean) constructor
      // (deprecated since Java 9) and reuses the cached TRUE/FALSE instances.
      configurations.add(new Object[] { dbe, Boolean.valueOf(includesMemstoreTS) });
    }
  }

  return configurations;
}
 
Example 6
Source File: TestHFile.java    From hbase with Apache License 2.0 5 votes vote down vote up
@Test
public void testDBEShipped() throws IOException {
  // Verifies that, after beforeShipped() is called, mutating the backing
  // ByteBuffer of an already-appended cell does not corrupt the writer:
  // the data block encoder must have copied whatever state it still needs.
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    DataBlockEncoder encoder = encoding.getEncoder();
    if (encoder == null) {
      // Encodings without a real encoder (encoder == null) have nothing to ship.
      continue;
    }
    // One output file per encoding so runs don't interfere with each other.
    Path f = new Path(ROOT_DIR, testName.getMethodName() + "_" + encoding);
    HFileContext context = new HFileContextBuilder()
        .withIncludesTags(false)
        .withDataBlockEncoding(encoding).build();
    HFileWriterImpl writer = (HFileWriterImpl) HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, f).withFileContext(context).create();

    // Three cells with identical layout, differing only in the row key,
    // so kv3's bytes can later overwrite kv's buffer in place.
    KeyValue kv = new KeyValue(Bytes.toBytes("testkey1"), Bytes.toBytes("family"),
        Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
    KeyValue kv2 = new KeyValue(Bytes.toBytes("testkey2"), Bytes.toBytes("family"),
      Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));
    KeyValue kv3 = new KeyValue(Bytes.toBytes("testkey3"), Bytes.toBytes("family"),
      Bytes.toBytes("qual"), Bytes.toBytes("testvalue"));

    ByteBuffer buffer = ByteBuffer.wrap(kv.getBuffer());
    ByteBuffer buffer2 = ByteBuffer.wrap(kv2.getBuffer());
    ByteBuffer buffer3 = ByteBuffer.wrap(kv3.getBuffer());

    // Append the first cell, then signal shipment — from here on the writer
    // must not rely on buffer's contents.
    writer.append(new ByteBufferKeyValue(buffer, 0, buffer.remaining()));
    writer.beforeShipped();

    // pollute first cell's backing ByteBuffer
    ByteBufferUtils.copyFromBufferToBuffer(buffer3, buffer);

    // write another cell, if DBE not Shipped, test will fail
    writer.append(new ByteBufferKeyValue(buffer2, 0, buffer2.remaining()));
    writer.close();
  }
}
 
Example 7
Source File: TestMobDataBlockEncoding.java    From hbase with Apache License 2.0 4 votes vote down vote up
@Test
public void testDataBlockEncoding() throws Exception {
  // Run the per-encoding test once for each available data block encoding.
  DataBlockEncoding[] encodings = DataBlockEncoding.values();
  for (int i = 0; i < encodings.length; i++) {
    testDataBlockEncoding(encodings[i]);
  }
}
 
Example 8
Source File: DataBlockEncodingTool.java    From hbase with Apache License 2.0 4 votes vote down vote up
/**
 * Check statistics for given HFile for different data block encoders.
 * @param scanner Of file which will be compressed.
 * @param kvLimit Maximal count of KeyValue which will be processed.
 * @throws IOException thrown if scanner is invalid
 */
public void checkStatistics(final KeyValueScanner scanner, final int kvLimit)
    throws IOException {
  // Start from the very first key in the file.
  scanner.seek(KeyValue.LOWESTKEY);

  KeyValue currentKV;

  byte[] previousKey = null;
  byte[] currentKey;

  DataBlockEncoding[] encodings = DataBlockEncoding.values();

  // Accumulates the raw (unencoded) key/value bytes for later re-encoding.
  ByteArrayOutputStream uncompressedOutputStream =
      new ByteArrayOutputStream();

  int j = 0;
  while ((currentKV = KeyValueUtil.ensureKeyValue(scanner.next())) != null && j < kvLimit) {
    // Iterates through key/value pairs
    j++;
    currentKey = currentKV.getKey();
    if (previousKey != null) {
      // Count bytes of common prefix between consecutive keys — this is the
      // redundancy that prefix-based encoders can exploit.
      for (int i = 0; i < previousKey.length && i < currentKey.length &&
          previousKey[i] == currentKey[i]; ++i) {
        totalKeyRedundancyLength++;
      }
    }

    // Add tagsLen zero to cells don't include tags. Since the process of
    // scanner converts byte array to KV would abandon tagsLen part if tagsLen
    // is zero. But we still needs the tagsLen part to check if current cell
    // include tags. If USE_TAG is true, HFile contains cells with tags,
    // if the cell tagsLen equals 0, it means other cells may have tags.
    if (USE_TAG && currentKV.getTagsLength() == 0) {
      uncompressedOutputStream.write(currentKV.getBuffer(),
          currentKV.getOffset(), currentKV.getLength());
      // write tagsLen = 0.
      uncompressedOutputStream.write(Bytes.toBytes((short) 0));
    } else {
      uncompressedOutputStream.write(currentKV.getBuffer(),
          currentKV.getOffset(), currentKV.getLength());
    }

    if(includesMemstoreTS) {
      // Append the MVCC sequence id as a vlong, matching the on-disk block format.
      WritableUtils.writeVLong(
          new DataOutputStream(uncompressedOutputStream), currentKV.getSequenceId());
    }

    previousKey = currentKey;

    // Per-cell length bookkeeping; restLen captures everything that is
    // neither key nor value (lengths, infrastructure bytes).
    int kLen = currentKV.getKeyLength();
    int vLen = currentKV.getValueLength();
    int cfLen = currentKV.getFamilyLength(currentKV.getFamilyOffset());
    int restLen = currentKV.getLength() - kLen - vLen;

    totalKeyLength += kLen;
    totalValueLength += vLen;
    totalPrefixLength += restLen;
    totalCFLength += cfLen;
  }

  // Snapshot the raw bytes, then build one EncodedDataBlock per real encoding
  // (NONE is skipped — there is nothing to encode) for later comparison.
  rawKVs = uncompressedOutputStream.toByteArray();
  for (DataBlockEncoding encoding : encodings) {
    if (encoding == DataBlockEncoding.NONE) {
      continue;
    }
    DataBlockEncoder d = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder()
        .withDataBlockEncoding(encoding)
        .withCompression(Compression.Algorithm.NONE)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(USE_TAG).build();
    codecs.add(new EncodedDataBlock(d, encoding, rawKVs, meta ));
  }
}