Java Code Examples for org.apache.hadoop.hbase.io.encoding.DataBlockEncoding#valueOf()

The following examples show how to use org.apache.hadoop.hbase.io.encoding.DataBlockEncoding#valueOf(). Each example is drawn from an open source project; the source file and its license are noted above each snippet.
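Before the project examples, here is a minimal sketch of the call itself. DataBlockEncoding is an ordinary Java enum, so valueOf() maps an exact, case-sensitive constant name such as "FAST_DIFF" to the corresponding constant and throws IllegalArgumentException for any other string (and NullPointerException for null). The class name below is illustrative, not taken from any of the projects:

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class DataBlockEncodingValueOfDemo {
  public static void main(String[] args) {
    // An exact, case-sensitive constant name resolves to the enum constant.
    DataBlockEncoding encoding = DataBlockEncoding.valueOf("FAST_DIFF");
    System.out.println(encoding); // prints FAST_DIFF

    try {
      // Any other spelling, including a lower-case variant, throws.
      DataBlockEncoding.valueOf("fast_diff");
    } catch (IllegalArgumentException e) {
      System.out.println("Unknown encoding name: " + e.getMessage());
    }
  }
}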
Example 1
Source File: HFileDataBlockEncoderImpl.java    From hbase with Apache License 2.0
public static HFileDataBlockEncoder createFromFileInfo(
    HFileInfo fileInfo) throws IOException {
  DataBlockEncoding encoding = DataBlockEncoding.NONE;
  byte[] dataBlockEncodingType = fileInfo.get(DATA_BLOCK_ENCODING);
  if (dataBlockEncodingType != null) {
    String dataBlockEncodingStr = Bytes.toString(dataBlockEncodingType);
    try {
      encoding = DataBlockEncoding.valueOf(dataBlockEncodingStr);
    } catch (IllegalArgumentException ex) {
      throw new IOException("Invalid data block encoding type in file info: "
        + dataBlockEncodingStr, ex);
    }
  }

  if (encoding == DataBlockEncoding.NONE) {
    return NoOpDataBlockEncoder.INSTANCE;
  }
  return new HFileDataBlockEncoderImpl(encoding);
}
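Note the design choice here: valueOf() reports an unrecognized name with an unchecked IllegalArgumentException, and createFromFileInfo() rewraps it as a checked IOException, so a corrupt or unsupported encoding name in the file info surfaces as an ordinary read error instead of an unchecked exception.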
 
Example 2
Source File: IntegrationTestLazyCfLoading.java    From hbase with Apache License 2.0
private void createTable() throws Exception {
  deleteTable();
  LOG.info("Creating table");
  Configuration conf = util.getConfiguration();
  String encodingKey = String.format(ENCODING_KEY, this.getClass().getSimpleName());
  DataBlockEncoding blockEncoding = DataBlockEncoding.valueOf(conf.get(encodingKey, "FAST_DIFF"));
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
    new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);
  for (byte[] cf : dataGen.getColumnFamilies()) {
    ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(cf);
    familyDescriptor.setDataBlockEncoding(blockEncoding);
    tableDescriptor.setColumnFamily(familyDescriptor);
  }
  int serverCount = util.getHBaseClusterInterface().getClusterMetrics()
    .getLiveServerMetrics().size();
  byte[][] splits = new RegionSplitter.HexStringSplit().split(serverCount * REGIONS_PER_SERVER);
  util.getAdmin().createTable(tableDescriptor, splits);
  LOG.info("Created table");
}
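In this example the encoding name comes from the configuration, with "FAST_DIFF" as the default when the key is unset. Because the value is passed straight to valueOf(), a misspelled name in the configuration fails fast with IllegalArgumentException before the table is created.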
 
Example 3
Source File: DataBlockEncodingValidator.java    From hbase with Apache License 2.0
/**
 * Check that the DataBlockEncoding of each column family is compatible with HBase 2.0.
 *
 * @return number of column families with incompatible DataBlockEncoding
 * @throws IOException if a remote or network exception occurs
 */
private int validateDBE() throws IOException {
  int incompatibilities = 0;

  LOG.info("Validating Data Block Encodings");

  try (Connection connection = ConnectionFactory.createConnection(getConf());
      Admin admin = connection.getAdmin()) {
    List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
    String encoding = "";

    for (TableDescriptor td : tableDescriptors) {
      ColumnFamilyDescriptor[] columnFamilies = td.getColumnFamilies();
      for (ColumnFamilyDescriptor cfd : columnFamilies) {
        try {
          encoding = Bytes.toString(cfd.getValue(DATA_BLOCK_ENCODING));
          // IllegalArgumentException will be thrown if encoding is incompatible with 2.0
          DataBlockEncoding.valueOf(encoding);
        } catch (IllegalArgumentException e) {
          incompatibilities++;
          LOG.warn("Incompatible DataBlockEncoding for table: {}, cf: {}, encoding: {}",
              td.getTableName().getNameAsString(), cfd.getNameAsString(), encoding);
        }
      }
    }
  }

  if (incompatibilities > 0) {
    LOG.warn("There are {} column families with incompatible Data Block Encodings. Do not "
        + "upgrade until these encodings are converted to a supported one. "
        + "Check https://s.apache.org/prefixtree for instructions.", incompatibilities);
  } else {
    LOG.info("The used Data Block Encodings are compatible with HBase 2.0.");
  }

  return incompatibilities;
}
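This validator uses valueOf() purely as a probe: a stored encoding name is considered compatible exactly when it still names a constant of the HBase 2.0 DataBlockEncoding enum. The PREFIX_TREE encoding, which was removed in HBase 2.0, is the usual name that fails this check, and the linked page describes how to migrate away from it.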
 
Example 4
Source File: TestJoinedScanners.java    From hbase with Apache License 2.0
/**
 * Command line interface:
 * @param args command line arguments
 * @throws Exception if the test fails, for example due to an error reading from disk
 */
public static void main(final String[] args) throws Exception {
  Option encodingOption = new Option("e", "blockEncoding", true,
    "Data block encoding; Default: FAST_DIFF");
  encodingOption.setRequired(false);
  options.addOption(encodingOption);

  Option ratioOption = new Option("r", "selectionRatio", true,
    "Ratio of selected rows using essential column family");
  ratioOption.setRequired(false);
  options.addOption(ratioOption);

  Option widthOption = new Option("w", "valueWidth", true,
    "Width of value for non-essential column family");
  widthOption.setRequired(false);
  options.addOption(widthOption);

  CommandLineParser parser = new GnuParser();
  CommandLine cmd = parser.parse(options, args);
  if (args.length < 1) {
    HelpFormatter formatter = new HelpFormatter();
    formatter.printHelp("TestJoinedScanners", options, true);
  }

  if (cmd.hasOption("e")) {
    blockEncoding = DataBlockEncoding.valueOf(cmd.getOptionValue("e"));
  }
  if (cmd.hasOption("r")) {
    selectionRatio = Integer.parseInt(cmd.getOptionValue("r"));
  }
  if (cmd.hasOption("w")) {
    valueWidth = Integer.parseInt(cmd.getOptionValue("w"));
  }
  // run the test
  TestJoinedScanners test = new TestJoinedScanners();
  test.testJoinedScanners();
}
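Note that printHelp() is not followed by a return, so when no arguments are given the usage text is printed and the test still runs with its defaults; all three options are optional.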
 
Example 5
Source File: LoadTestTool.java    From hbase with Apache License 2.0
private void parseColumnFamilyOptions(CommandLine cmd) {
  String dataBlockEncodingStr = cmd.getOptionValue(HFileTestUtil.OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = dataBlockEncodingStr == null ? null :
      DataBlockEncoding.valueOf(dataBlockEncodingStr);

  String compressStr = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = compressStr == null ? Compression.Algorithm.NONE :
      Compression.Algorithm.valueOf(compressStr);

  String bloomStr = cmd.getOptionValue(OPT_BLOOM);
  bloomType = bloomStr == null ? BloomType.ROW :
      BloomType.valueOf(bloomStr);

  if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {
    if (!cmd.hasOption(OPT_BLOOM_PARAM)) {
      LOG.error("the parameter of bloom filter {} is not specified", bloomType.name());
    } else {
      conf.set(BloomFilterUtil.PREFIX_LENGTH_KEY, cmd.getOptionValue(OPT_BLOOM_PARAM));
    }
  }

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  if (cmd.hasOption(OPT_ENCRYPTION)) {
    cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
  }
}
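One detail worth noticing in this example is the null check before valueOf(): the option may simply be absent, and an enum's valueOf() throws NullPointerException for a null argument rather than IllegalArgumentException. If the same guard were needed in several places, it could be factored into a small helper along these lines (EncodingOptions and parseEncoding are hypothetical names, not part of LoadTestTool):

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

final class EncodingOptions {
  private EncodingOptions() {
  }

  // Hypothetical helper: returns the caller-supplied fallback when the
  // option string is absent, since valueOf(null) would throw
  // NullPointerException rather than IllegalArgumentException.
  static DataBlockEncoding parseEncoding(String name, DataBlockEncoding fallback) {
    return name == null ? fallback : DataBlockEncoding.valueOf(name);
  }
}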
 
Example 6
Source File: CubeHTableUtil.java    From kylin-on-parquet-v2 with Apache License 2.0
public static HColumnDescriptor createColumnFamily(KylinConfig kylinConfig, String cfName, boolean isMemoryHungry) {
    HColumnDescriptor cf = new HColumnDescriptor(cfName);
    cf.setMaxVersions(1);

    if (isMemoryHungry) {
        cf.setBlocksize(kylinConfig.getHbaseDefaultBlockSize());
    } else {
        cf.setBlocksize(kylinConfig.getHbaseSmallFamilyBlockSize());
    }

    String hbaseDefaultCC = kylinConfig.getHbaseDefaultCompressionCodec().toLowerCase(Locale.ROOT);
    switch (hbaseDefaultCC) {
    case "snappy": {
        logger.info("hbase will use snappy to compress data");
        cf.setCompressionType(Algorithm.SNAPPY);
        break;
    }
    case "lzo": {
        logger.info("hbase will use lzo to compress data");
        cf.setCompressionType(Algorithm.LZO);
        break;
    }
    case "gz":
    case "gzip": {
        logger.info("hbase will use gzip to compress data");
        cf.setCompressionType(Algorithm.GZ);
        break;
    }
    case "lz4": {
        logger.info("hbase will use lz4 to compress data");
        cf.setCompressionType(Algorithm.LZ4);
        break;
    }
    case "none":
    default: {
        logger.info("hbase will not use any compression algorithm to compress data");
        cf.setCompressionType(Algorithm.NONE);
    }
    }

    try {
        String encodingStr = kylinConfig.getHbaseDefaultEncoding();
        DataBlockEncoding encoding = DataBlockEncoding.valueOf(encodingStr);
        cf.setDataBlockEncoding(encoding);
    } catch (Exception e) {
        logger.info("hbase will not use any encoding", e);
        cf.setDataBlockEncoding(DataBlockEncoding.NONE);
    }

    cf.setInMemory(false);
    cf.setBloomFilterType(BloomType.NONE);
    cf.setScope(kylinConfig.getHBaseReplicationScope());
    return cf;
}
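Unlike the earlier examples, this one swallows any failure from valueOf(): if the configured encoding name is empty, null, or misspelled, the exception is logged and the column family falls back to DataBlockEncoding.NONE, so table creation never fails because of a bad encoding setting.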
 
Example 7
Source File: CubeHTableUtil.java    From kylin with Apache License 2.0

The createColumnFamily() method in this file is identical, line for line, to Example 6 above; the kylin and kylin-on-parquet-v2 projects share the same CubeHTableUtil.java, so the code is not repeated here.