Java Code Examples for org.apache.hadoop.hbase.regionserver.BloomType#ROW

The following examples show how to use org.apache.hadoop.hbase.regionserver.BloomType#ROW. Each example is taken from the Apache HBase source tree; the originating source file is noted above each snippet.
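
Before the examples from the HBase code base, here is a minimal sketch of where BloomType.ROW typically enters user code: choosing a row-level Bloom filter for a column family. It assumes the HBase 2.x client API; the table and family names are placeholders.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class RowBloomExample {
  public static void main(String[] args) {
    // Column family whose store files will carry a row-level Bloom filter.
    ColumnFamilyDescriptor cf = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes("cf"))              // placeholder family name
        .setBloomFilterType(BloomType.ROW)
        .build();

    // Table descriptor that could then be passed to Admin#createTable.
    TableDescriptor table = TableDescriptorBuilder
        .newBuilder(TableName.valueOf("example_table"))  // placeholder table name
        .setColumnFamily(cf)
        .build();

    System.out.println(table);
  }
}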
Example 1
Source File: BloomFilterFactory.java    From hbase with Apache License 2.0
/**
 * Creates a new Delete Family Bloom filter at the time of
 * {@link org.apache.hadoop.hbase.regionserver.HStoreFile} writing.
 * @param conf the current configuration
 * @param cacheConf the cache configuration, used to decide whether Bloom
 *        blocks should be cached on write
 * @param maxKeys an estimate of the number of keys we expect to insert.
 *        Irrelevant if compound Bloom filters are enabled.
 * @param writer the HFile writer
 * @return the new Bloom filter, or null if Bloom filters are disabled
 *         or the filter could not be created
 */
public static BloomFilterWriter createDeleteBloomAtWrite(Configuration conf,
    CacheConfig cacheConf, int maxKeys, HFile.Writer writer) {
  if (!isDeleteFamilyBloomEnabled(conf)) {
    LOG.info("Delete Bloom filters are disabled by configuration for "
        + writer.getPath()
        + (conf == null ? " (configuration is null)" : ""));
    return null;
  }

  float err = getErrorRate(conf);

  int maxFold = getMaxFold(conf);
  // In case of compound Bloom filters we ignore the maxKeys hint.
  CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
      err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
      null, BloomType.ROW);
  writer.addInlineBlockWriter(bloomWriter);
  return bloomWriter;
}
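
Note that BloomType.ROW is passed unconditionally here: the Delete Family Bloom filter records which rows contain delete family markers, so it is keyed by row regardless of the Bloom type configured on the column family.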
 
Example 2
Source File: ThriftUtilities.java    From hbase with Apache License 2.0
public static BloomType bloomFilterFromThrift(TBloomFilterType in) {
  switch (in.getValue()) {
    case 0: return BloomType.NONE;
    case 1: return BloomType.ROW;
    case 2: return BloomType.ROWCOL;
    case 3: return BloomType.ROWPREFIX_FIXED_LENGTH;
    default: return BloomType.ROW;
  }
}
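
The default case above falls back to BloomType.ROW for unrecognized values. For orientation, the reverse mapping might look like the sketch below; the helper name is hypothetical, and it assumes the generated TBloomFilterType constants mirror the BloomType names, as the numeric values above suggest.

// Illustrative reverse conversion (hypothetical helper, not necessarily
// what ThriftUtilities actually provides).
public static TBloomFilterType bloomFilterToThrift(BloomType in) {
  switch (in) {
    case NONE: return TBloomFilterType.NONE;
    case ROWCOL: return TBloomFilterType.ROWCOL;
    case ROWPREFIX_FIXED_LENGTH: return TBloomFilterType.ROWPREFIX_FIXED_LENGTH;
    case ROW:
    default: return TBloomFilterType.ROW;
  }
}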
 
Example 3
Source File: LoadTestTool.java    From hbase with Apache License 2.0
private void parseColumnFamilyOptions(CommandLine cmd) {
  String dataBlockEncodingStr = cmd.getOptionValue(HFileTestUtil.OPT_DATA_BLOCK_ENCODING);
  dataBlockEncodingAlgo = dataBlockEncodingStr == null ? null :
      DataBlockEncoding.valueOf(dataBlockEncodingStr);

  String compressStr = cmd.getOptionValue(OPT_COMPRESSION);
  compressAlgo = compressStr == null ? Compression.Algorithm.NONE :
      Compression.Algorithm.valueOf(compressStr);

  String bloomStr = cmd.getOptionValue(OPT_BLOOM);
  bloomType = bloomStr == null ? BloomType.ROW :
      BloomType.valueOf(bloomStr);

  if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {
    if (!cmd.hasOption(OPT_BLOOM_PARAM)) {
      LOG.error("the parameter of bloom filter {} is not specified", bloomType.name());
    } else {
      conf.set(BloomFilterUtil.PREFIX_LENGTH_KEY, cmd.getOptionValue(OPT_BLOOM_PARAM));
    }
  }

  inMemoryCF = cmd.hasOption(OPT_INMEMORY);
  if (cmd.hasOption(OPT_ENCRYPTION)) {
    cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));
  }

}
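
Two details worth noting in this option parsing: when no Bloom option is supplied, the tool defaults to BloomType.ROW, and ROWPREFIX_FIXED_LENGTH additionally requires a prefix length, which is passed through to the configuration under BloomFilterUtil.PREFIX_LENGTH_KEY.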
 
Example 4
Source File: BloomFilterChunk.java    From hbase with Apache License 2.0
/**
 * Determines & initializes bloom filter meta data from user config. Call
 * {@link #allocBloom()} to allocate bloom filter data.
 *
 * @param maxKeys Maximum expected number of keys that will be stored in this
 *          bloom
 * @param errorRate Desired false positive error rate. Lower rate = more
 *          storage required
 * @param hashType Type of hash function to use
 * @param foldFactor When finished adding entries, you may be able to 'fold'
 *          this bloom to save space. This trades potentially excess bytes in
 *          the bloom for the ability to fold the bitmap down when far fewer
 *          keys than maxKeys end up being added.
 * @throws IllegalArgumentException if the computed Bloom filter parameters
 *           are invalid
 */
// Used only in testcases
public BloomFilterChunk(int maxKeys, double errorRate, int hashType,
    int foldFactor) throws IllegalArgumentException {
  this(hashType, BloomType.ROW);

  long bitSize = BloomFilterUtil.computeBitSize(maxKeys, errorRate);
  hashCount = BloomFilterUtil.optimalFunctionCount(maxKeys, bitSize);
  this.maxKeys = maxKeys;

  // increase byteSize so folding is possible
  byteSize = BloomFilterUtil.computeFoldableByteSize(bitSize, foldFactor);

  sanityCheck();
}
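
The constructor above sizes the filter from maxKeys and errorRate. As a rough guide, the sketch below shows the standard Bloom filter formulas that BloomFilterUtil.computeBitSize and BloomFilterUtil.optimalFunctionCount are expected to approximate; it is a self-contained illustration of the textbook math, not the HBase implementation itself.

public class BloomSizingSketch {
  public static void main(String[] args) {
    int maxKeys = 1_000_000;   // expected number of keys
    double errorRate = 0.01;   // desired false positive rate

    // m = -n * ln(p) / (ln 2)^2 : bits needed for n keys at error rate p
    long bitSize = (long) Math.ceil(
        -maxKeys * Math.log(errorRate) / (Math.log(2) * Math.log(2)));

    // k = (m / n) * ln 2 : optimal number of hash functions
    int hashCount = (int) Math.round((double) bitSize / maxKeys * Math.log(2));

    // Roughly 9.6 bits per key and 7 hash functions for a 1% error rate.
    System.out.println("bits=" + bitSize + ", hashes=" + hashCount);
  }
}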