Java Code Examples for org.apache.hadoop.hbase.io.compress.Compression

The following examples show how to use org.apache.hadoop.hbase.io.compress.Compression. They are extracted from open source projects; the source project, file, and license are noted above each example.
Example 1
Source Project: flinkDemo   Source File: HBaseUtils.java    License: Apache License 2.0
/**
 * Creates a table with no pre-split regions.
 *
 * @param tableName the table name
 * @param cfs       the column families
 * @throws Exception Exception
 */
public static void createTable(String tableName, String... cfs) throws Exception {
    Admin admin = null;
    try {
        admin = HBaseUtils.getConnection().getAdmin();
        HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
        for (String family : cfs) {
            HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(family);
            hColumnDescriptor.setCompressionType(Compression.Algorithm.SNAPPY);
            hColumnDescriptor.setMaxVersions(3);
            hTableDescriptor.addFamily(hColumnDescriptor);
        }
        admin.createTable(hTableDescriptor);
        LOGGER.info("create table " + tableName + " success.");
    } finally {
        HBaseUtils.closeAdmin(admin);
    }
}
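SNAPPY is a native codec, so if the Snappy libraries are missing on the region servers, creating or writing to such a table can fail. A minimal usage sketch of the helper above (the table and family names are hypothetical):

// Illustrative call; "metrics", "d" and "m" are made-up names, and
// HBaseUtils is assumed to manage its own Connection internally.
HBaseUtils.createTable("metrics", "d", "m");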
 
Example 2
Source Project: atlas   Source File: HBaseBasedAuditRepository.java    License: Apache License 2.0
private void createTableIfNotExists() throws AtlasException {
    Admin admin = null;
    try {
        admin = connection.getAdmin();
        LOG.info("Checking if table {} exists", tableName.getNameAsString());
        if (!admin.tableExists(tableName)) {
            LOG.info("Creating table {}", tableName.getNameAsString());
            HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
            HColumnDescriptor columnFamily = new HColumnDescriptor(COLUMN_FAMILY);
            columnFamily.setMaxVersions(1);
            columnFamily.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
            columnFamily.setCompressionType(Compression.Algorithm.GZ);
            columnFamily.setBloomFilterType(BloomType.ROW);
            tableDescriptor.addFamily(columnFamily);
            admin.createTable(tableDescriptor);
        } else {
            LOG.info("Table {} exists", tableName.getNameAsString());
        }
    } catch (IOException e) {
        throw new AtlasException(e);
    } finally {
        close(admin);
    }
}
 
Example 3
Source Project: incubator-atlas   Source File: HBaseBasedAuditRepository.java    License: Apache License 2.0
private void createTableIfNotExists() throws AtlasException {
    Admin admin = null;
    try {
        admin = connection.getAdmin();
        LOG.info("Checking if table {} exists", tableName.getNameAsString());
        if (!admin.tableExists(tableName)) {
            LOG.info("Creating table {}", tableName.getNameAsString());
            HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
            HColumnDescriptor columnFamily = new HColumnDescriptor(COLUMN_FAMILY);
            columnFamily.setMaxVersions(1);
            columnFamily.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
            columnFamily.setCompressionType(Compression.Algorithm.GZ);
            columnFamily.setBloomFilterType(BloomType.ROW);
            tableDescriptor.addFamily(columnFamily);
            admin.createTable(tableDescriptor);
        } else {
            LOG.info("Table {} exists", tableName.getNameAsString());
        }
    } catch (IOException e) {
        throw new AtlasException(e);
    } finally {
        close(admin);
    }
}
 
Example 4
Source Project: hbase   Source File: ThriftUtilities.java    License: Apache License 2.0
/**
 * This utility method creates a new HBase column family descriptor based on a
 * Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return ModifyableColumnFamilyDescriptor
 * @throws IllegalArgument if the column name is empty
 */
static public ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor colDescFromThrift(
    ColumnDescriptor in) throws IllegalArgument {
  Compression.Algorithm comp =
    Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
  BloomType bt =
    BloomType.valueOf(in.bloomFilterType);

  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte [] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
  ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
    new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(parsedName)
      .setMaxVersions(in.maxVersions)
      .setCompressionType(comp)
      .setInMemory(in.inMemory)
      .setBlockCacheEnabled(in.blockCacheEnabled)
      .setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE)
      .setBloomFilterType(bt);
  return familyDescriptor;
}
 
Example 5
Source Project: hbase   Source File: CompressionTest.java    License: Apache License 2.0
public static boolean testCompression(String codec) {
  codec = codec.toLowerCase(Locale.ROOT);

  Compression.Algorithm a;

  try {
    a = Compression.getCompressionAlgorithmByName(codec);
  } catch (IllegalArgumentException e) {
    LOG.warn("Codec type: " + codec + " is not known");
    return false;
  }

  try {
    testCompression(a);
    return true;
  } catch (IOException ignored) {
    LOG.warn("Can't instantiate codec: " + codec, ignored);
    return false;
  }
}
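A minimal sketch of how this string-based probe might guard table setup (the GZ fallback is an assumption for illustration):

// Probe the codec by name; fall back to the pure-Java GZ codec if the
// native library is unavailable.
Compression.Algorithm algo = CompressionTest.testCompression("snappy")
    ? Compression.Algorithm.SNAPPY
    : Compression.Algorithm.GZ;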
 
Example 6
Source Project: hbase   Source File: CompressionTest.java    License: Apache License 2.0
public static void testCompression(Compression.Algorithm algo)
    throws IOException {
  if (compressionTestResults[algo.ordinal()] != null) {
    if (compressionTestResults[algo.ordinal()]) {
      return; // already passed test, don't do it again.
    } else {
      // failed.
      throw new DoNotRetryIOException("Compression algorithm '" + algo.getName() + "'" +
      " previously failed test.");
    }
  }

  try {
    Compressor c = algo.getCompressor();
    algo.returnCompressor(c);
    compressionTestResults[algo.ordinal()] = true; // passes
  } catch (Throwable t) {
    compressionTestResults[algo.ordinal()] = false; // failure
    throw new DoNotRetryIOException(t);
  }
}
 
Example 7
Source Project: hbase   Source File: TestHFileOutputFormat2.java    License: Apache License 2.0
/**
 * @return a map from column family names to compression algorithms for
 *         testing column family compression. Column family names have special characters.
 */
private Map<String, Compression.Algorithm>
    getMockColumnFamiliesForCompression (int numCfs) {
  Map<String, Compression.Algorithm> familyToCompression = new HashMap<>();
  // use column family names having special characters
  if (numCfs-- > 0) {
    familyToCompression.put("[email protected]#[email protected]#&", Compression.Algorithm.LZO);
  }
  if (numCfs-- > 0) {
    familyToCompression.put("Family2=asdads&!AASD", Compression.Algorithm.SNAPPY);
  }
  if (numCfs-- > 0) {
    familyToCompression.put("Family2=asdads&!AASD", Compression.Algorithm.GZ);
  }
  if (numCfs-- > 0) {
    familyToCompression.put("Family3", Compression.Algorithm.NONE);
  }
  return familyToCompression;
}
 
Example 8
Source Project: hbase   Source File: HStore.java    License: Apache License 2.0
private HFileContext createFileContext(Compression.Algorithm compression,
    boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
  if (compression == null) {
    compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;
  }
  HFileContext hFileContext = new HFileContextBuilder()
                              .withIncludesMvcc(includeMVCCReadpoint)
                              .withIncludesTags(includesTag)
                              .withCompression(compression)
                              .withCompressTags(family.isCompressTags())
                              .withChecksumType(checksumType)
                              .withBytesPerCheckSum(bytesPerChecksum)
                              .withBlockSize(blocksize)
                              .withHBaseCheckSum(true)
                              .withDataBlockEncoding(family.getDataBlockEncoding())
                              .withEncryptionContext(cryptoContext)
                              .withCreateTime(EnvironmentEdgeManager.currentTime())
                              .withColumnFamily(family.getName())
                              .withTableName(region.getTableDescriptor()
                                  .getTableName().getName())
                              .withCellComparator(this.comparator)
                              .build();
  return hFileContext;
}
 
Example 9
Source Project: hbase   Source File: FixedFileTrailer.java    License: Apache License 2.0
/**
 * Deserialize the file trailer as writable data
 */
void deserializeFromWritable(DataInput input) throws IOException {
  fileInfoOffset = input.readLong();
  loadOnOpenDataOffset = input.readLong();
  dataIndexCount = input.readInt();
  uncompressedDataIndexSize = input.readLong();
  metaIndexCount = input.readInt();

  totalUncompressedBytes = input.readLong();
  entryCount = input.readLong();
  compressionCodec = Compression.Algorithm.values()[input.readInt()];
  numDataIndexLevels = input.readInt();
  firstDataBlockOffset = input.readLong();
  lastDataBlockOffset = input.readLong();
  // TODO: this is a classname encoded into an HFile's trailer. We are going to need to have
  // some compat code here.
  setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
    MAX_COMPARATOR_NAME_LENGTH)));
}
 
Example 10
/**
 * This is borrowed from DefaultCompactor.
 *
 * @param compression the compression algorithm to use, or null for the HFile default
 * @param includeMVCCReadpoint whether the MVCC read point should be included
 * @param includesTag whether the cells carry tags
 * @param cryptoContext the encryption context
 * @return the HFileContext for the store file to be written
 */
private HFileContext createFileContext(Compression.Algorithm compression,
                                       boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
    if (compression == null) {
        compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;
    }
    HFileContext hFileContext = new HFileContextBuilder()
            .withIncludesMvcc(includeMVCCReadpoint)
            .withIncludesTags(includesTag)
            .withCompression(compression)
            .withCompressTags(store.getColumnFamilyDescriptor().isCompressTags())
            .withChecksumType(HStore.getChecksumType(conf))
            .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
            .withBlockSize(store.getColumnFamilyDescriptor().getBlocksize())
            .withHBaseCheckSum(true)
            .withDataBlockEncoding(store.getColumnFamilyDescriptor().getDataBlockEncoding())
            .withEncryptionContext(cryptoContext)
            .withCreateTime(EnvironmentEdgeManager.currentTime())
            .build();
    return hFileContext;
}
 
Example 11
Source Project: hbase   Source File: HBaseTestingUtility.java    License: Apache License 2.0
/**
 * Create a set of column descriptors with the combination of compression,
 * encoding, bloom codecs available.
 * @param prefix family names prefix
 * @return the list of column descriptors
 */
public static List<ColumnFamilyDescriptor> generateColumnDescriptors(final String prefix) {
  List<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>();
  long familyId = 0;
  for (Compression.Algorithm compressionType: getSupportedCompressionAlgorithms()) {
    for (DataBlockEncoding encodingType: DataBlockEncoding.values()) {
      for (BloomType bloomType: BloomType.values()) {
        String name = String.format("%s@#&-%d@#", prefix, familyId);
        ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
          ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(name));
        columnFamilyDescriptorBuilder.setCompressionType(compressionType);
        columnFamilyDescriptorBuilder.setDataBlockEncoding(encodingType);
        columnFamilyDescriptorBuilder.setBloomFilterType(bloomType);
        columnFamilyDescriptors.add(columnFamilyDescriptorBuilder.build());
        familyId++;
      }
    }
  }
  return columnFamilyDescriptors;
}
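A brief usage sketch (the prefix "f" is arbitrary; building a table from the descriptors is left out):

// One descriptor per codec x encoding x bloom-type combination; the list
// can be fed into a TableDescriptorBuilder for an exhaustive test table.
List<ColumnFamilyDescriptor> families =
    HBaseTestingUtility.generateColumnDescriptors("f");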
 
Example 12
Source Project: hbase   Source File: TestScanWithBloomError.java    License: Apache License 2.0
@Test
public void testThreeStoreFiles() throws IOException {
  ColumnFamilyDescriptor columnFamilyDescriptor =
    ColumnFamilyDescriptorBuilder
      .newBuilder(Bytes.toBytes(FAMILY))
      .setCompressionType(Compression.Algorithm.GZ)
      .setBloomFilterType(bloomType)
      .setMaxVersions(TestMultiColumnScanner.MAX_VERSIONS).build();
  region = TEST_UTIL.createTestRegion(TABLE_NAME, columnFamilyDescriptor);
  createStoreFile(new int[] {1, 2, 6});
  createStoreFile(new int[] {1, 2, 3, 7});
  createStoreFile(new int[] {1, 9});
  scanColSet(new int[]{1, 4, 6, 7}, new int[]{1, 6, 7});

  HBaseTestingUtility.closeRegionAndWAL(region);
}
 
Example 13
Source Project: hbase   Source File: TestBlocksScanned.java    License: Apache License 2.0
@Test
public void testBlocksScannedWithEncoding() throws Exception {
  byte [] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
    new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf(tableName));

  tableDescriptor.setColumnFamily(
      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY)
      .setMaxVersions(10)
      .setBlockCacheEnabled(true)
      .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
      .setBlocksize(BLOCK_SIZE)
      .setCompressionType(Compression.Algorithm.NONE)
      );
  _testBlocksScanned(tableDescriptor);
}
 
Example 14
Source Project: hbase   Source File: TestHFileBlock.java    License: Apache License 2.0
static HFileBlock.Writer createTestV2Block(Compression.Algorithm algo,
    boolean includesMemstoreTS, boolean includesTag) throws IOException {
  final BlockType blockType = BlockType.DATA;
  HFileContext meta = new HFileContextBuilder()
                      .withCompression(algo)
                      .withIncludesMvcc(includesMemstoreTS)
                      .withIncludesTags(includesTag)
                      .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
                      .build();
  HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta);
  DataOutputStream dos = hbw.startWriting(blockType);
  writeTestBlockContents(dos);
  dos.flush();
  hbw.ensureBlockReady();
  assertEquals(1000 * 4, hbw.getUncompressedSizeWithoutHeader());
  hbw.release();
  return hbw;
}
 
Example 15
Source Project: hbase   Source File: TestHFile.java    License: Apache License 2.0
@Test
public void testNullMetaBlocks() throws Exception {
  for (Compression.Algorithm compressAlgo :
      HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
    Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo)
                        .withBlockSize(minBlockSize).build();
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withFileContext(meta)
        .create();
    KeyValue kv = new KeyValue(Bytes.toBytes("foo"), Bytes.toBytes("f1"), null,
        Bytes.toBytes("value"));
    writer.append(kv);
    writer.close();
    fout.close();
    Reader reader = HFile.createReader(fs, mFile, cacheConf, true, conf);
    assertNull(reader.getMetaBlock("non-existent", false));
  }
}
 
Example 16
Source Project: hbase   Source File: TestSeekToBlockWithEncoders.java    License: Apache License 2.0
private void seekToTheKey(KeyValue expected, List<KeyValue> kvs, Cell toSeek)
    throws IOException {
  // create all seekers
  List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder().withHBaseCheckSum(false)
        .withIncludesMvcc(false).withIncludesTags(false)
        .withCompression(Compression.Algorithm.NONE).build();
    HFileBlockEncodingContext encodingContext = encoder.newDataBlockEncodingContext(encoding,
        HFILEBLOCK_DUMMY_HEADER, meta);
    ByteBuffer encodedBuffer = TestDataBlockEncoders.encodeKeyValues(encoding, kvs,
        encodingContext, this.useOffheapData);
    DataBlockEncoder.EncodedSeeker seeker =
      encoder.createSeeker(encoder.newDataBlockDecodingContext(meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    encodedSeekers.add(seeker);
  }
  // test it!
  // try a few random seeks
  checkSeekingConsistency(encodedSeekers, toSeek, expected);
}
 
Example 17
Source Project: hbase   Source File: HFileContext.java    License: Apache License 2.0
HFileContext(boolean useHBaseChecksum, boolean includesMvcc, boolean includesTags,
             Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
             int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
             Encryption.Context cryptoContext, long fileCreateTime, String hfileName,
             byte[] columnFamily, byte[] tableName, CellComparator cellComparator) {
  this.usesHBaseChecksum = useHBaseChecksum;
  this.includesMvcc =  includesMvcc;
  this.includesTags = includesTags;
  this.compressAlgo = compressAlgo;
  this.compressTags = compressTags;
  this.checksumType = checksumType;
  this.bytesPerChecksum = bytesPerChecksum;
  this.blocksize = blockSize;
  if (encoding != null) {
    this.encoding = encoding;
  }
  this.cryptoContext = cryptoContext;
  this.fileCreateTime = fileCreateTime;
  this.hfileName = hfileName;
  this.columnFamily = columnFamily;
  this.tableName = tableName;
  // If no cellComparator specified, make a guess based off tablename. If hbase:meta, then should
  // be the meta table comparator. Comparators are per table.
  this.cellComparator = cellComparator != null ? cellComparator : this.tableName != null ?
    CellComparatorImpl.getCellComparator(this.tableName) : CellComparator.getInstance();
}
 
Example 18
Source Project: hbase   Source File: TestDataBlockEncoders.java    License: Apache License 2.0
/**
 * Test whether the decompression of the first key is implemented correctly.
 * @throws IOException
 */
@Test
public void testFirstKeyInBlockOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
        getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData);
    Cell key = encoder.getFirstKeyCellInBlock(new SingleByteBuff(encodedBuffer));
    KeyValue firstKv = sampleKv.get(0);
    if (0 != PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, key, firstKv)) {
      int commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(key, firstKv, false, true);
      fail(String.format("Bug in '%s' commonPrefix %d", encoder.toString(), commonPrefix));
    }
  }
}
 
Example 19
Source Project: hbase   Source File: TestHFileOutputFormat2.java    License: Apache License 2.0
private void setupMockColumnFamiliesForCompression(Table table,
    Map<String, Compression.Algorithm> familyToCompression) throws IOException {

  TableDescriptorBuilder mockTableDescriptor =
    TableDescriptorBuilder.newBuilder(TABLE_NAMES[0]);
  for (Entry<String, Compression.Algorithm> entry : familyToCompression.entrySet()) {
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder
      .newBuilder(Bytes.toBytes(entry.getKey()))
      .setMaxVersions(1)
      .setCompressionType(entry.getValue())
      .setBlockCacheEnabled(false)
      .setTimeToLive(0)
      .build();

    mockTableDescriptor.setColumnFamily(columnFamilyDescriptor);
  }
  Mockito.doReturn(mockTableDescriptor.build()).when(table).getDescriptor();
}
 
Example 20
Source Project: PoseidonX   Source File: HTableOperatorImpl.java    License: Apache License 2.0
private HColumnDescriptor changeCd(ColumnDescriptor cd) {
    HColumnDescriptor family = new HColumnDescriptor(cd.getFamilyName());
    if (cd.isCompress()) {
        // Only files rewritten by compaction are compressed; the regular
        // COMPRESSION setting (used for flushes) keeps its default.
        family.setCompactionCompressionType(Compression.Algorithm.GZ);
    }
    return family;
}
 
Example 21
Source Project: hbase   Source File: ThriftUtilities.java    License: Apache License 2.0
public static Compression.Algorithm compressionAlgorithmFromThrift(TCompressionAlgorithm in) {
  switch (in.getValue()) {
    case 0: return Compression.Algorithm.LZO;
    case 1: return Compression.Algorithm.GZ;
    case 2: return Compression.Algorithm.NONE;
    case 3: return Compression.Algorithm.SNAPPY;
    case 4: return Compression.Algorithm.LZ4;
    case 5: return Compression.Algorithm.BZIP2;
    case 6: return Compression.Algorithm.ZSTD;
    default: return Compression.Algorithm.NONE;
  }
}
 
Example 22
Source Project: hbase   Source File: ThriftUtilities.java    License: Apache License 2.0
public static TCompressionAlgorithm compressionAlgorithmFromHBase(Compression.Algorithm in) {
  switch (in) {
    case LZO: return TCompressionAlgorithm.LZO;
    case GZ: return TCompressionAlgorithm.GZ;
    case NONE: return TCompressionAlgorithm.NONE;
    case SNAPPY: return TCompressionAlgorithm.SNAPPY;
    case LZ4: return TCompressionAlgorithm.LZ4;
    case BZIP2: return TCompressionAlgorithm.BZIP2;
    case ZSTD: return TCompressionAlgorithm.ZSTD;
    default: return TCompressionAlgorithm.NONE;
  }
}
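Examples 21 and 22 form an inverse pair for the algorithms they enumerate; an illustrative round-trip check (assuming both helpers are in scope):

// HBase enum -> Thrift enum -> HBase enum should be the identity for
// every algorithm handled by the two switches above.
Compression.Algorithm algo = Compression.Algorithm.SNAPPY;
TCompressionAlgorithm thrift = compressionAlgorithmFromHBase(algo);
assert compressionAlgorithmFromThrift(thrift) == algo;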
 
Example 23
public static HColumnDescriptor createDataFamily() {
    HColumnDescriptor snapshot = new HColumnDescriptor(SIConstants.DEFAULT_FAMILY_BYTES);
    snapshot.setMaxVersions(Integer.MAX_VALUE);
    snapshot.setCompressionType(Compression.Algorithm.NONE);
    snapshot.setInMemory(true);
    snapshot.setBlockCacheEnabled(true);
    snapshot.setBloomFilterType(BloomType.ROW);
    return snapshot;
}
 
Example 24
Source Project: hbase   Source File: HMobStore.java    License: Apache License 2.0
/**
 * Creates the writer for the mob file in temp directory.
 * @param date The latest date of written cells.
 * @param maxKeyCount The key count.
 * @param compression The compression algorithm.
 * @param startKey The start key.
 * @param isCompaction If the writer is used in compaction.
 * @return The writer for the mob file.
 * @throws IOException
 */
public StoreFileWriter createWriterInTmp(Date date, long maxKeyCount,
    Compression.Algorithm compression, byte[] startKey,
    boolean isCompaction) throws IOException {
  if (startKey == null) {
    startKey = HConstants.EMPTY_START_ROW;
  }
  Path path = getTempDir();
  return createWriterInTmp(MobUtils.formatDate(date), path, maxKeyCount, compression, startKey,
    isCompaction);
}
 
Example 25
Source Project: hbase   Source File: Compactor.java    License: Apache License 2.0
Compactor(Configuration conf, HStore store) {
  this.conf = conf;
  this.store = store;
  this.compactionKVMax =
    this.conf.getInt(HConstants.COMPACTION_KV_MAX, HConstants.COMPACTION_KV_MAX_DEFAULT);
  this.compactionCompression = (this.store.getColumnFamilyDescriptor() == null) ?
      Compression.Algorithm.NONE : this.store.getColumnFamilyDescriptor().getCompactionCompressionType();
  this.keepSeqIdPeriod = Math.max(this.conf.getInt(HConstants.KEEP_SEQID_PERIOD,
    HConstants.MIN_KEEP_SEQID_PERIOD), HConstants.MIN_KEEP_SEQID_PERIOD);
  this.dropCacheMajor = conf.getBoolean(MAJOR_COMPACTION_DROP_CACHE, true);
  this.dropCacheMinor = conf.getBoolean(MINOR_COMPACTION_DROP_CACHE, true);
}
 
Example 26
private static HTableDescriptor generateTransactionTable() throws IOException{
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("splice",HConfiguration.TRANSACTION_TABLE));
    desc.addCoprocessor(TxnLifecycleEndpoint.class.getName());

    HColumnDescriptor columnDescriptor = new HColumnDescriptor(SIConstants.DEFAULT_FAMILY_BYTES);
    columnDescriptor.setMaxVersions(5);
    columnDescriptor.setCompressionType(Compression.Algorithm.NONE);
    columnDescriptor.setInMemory(true);
    columnDescriptor.setBlockCacheEnabled(true);
    columnDescriptor.setBloomFilterType(BloomType.ROWCOL);
    desc.addFamily(columnDescriptor);
    desc.addFamily(new HColumnDescriptor(Bytes.toBytes(SIConstants.SI_PERMISSION_FAMILY)));
    return desc;
}
 
Example 27
Source Project: pinpoint   Source File: ProgramOptions.java    License: Apache License 2.0
private static String getCompression(ApplicationArguments args) {
    List<String> compressions = args.getOptionValues(COMPRESSION);
    if (CollectionUtils.isEmpty(compressions)) {
        return Compression.Algorithm.NONE.getName();
    }
    String compression = compressions.get(0);
    if (StringUtils.isEmpty(compression)) {
        return Compression.Algorithm.NONE.getName();
    }
    return compression;
}
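The returned value is only a codec name; a caller could resolve it back into an enum the same way Example 5 does (a sketch, assuming an ApplicationArguments instance named args):

// Unknown names make getCompressionAlgorithmByName throw an
// IllegalArgumentException, so the "none" returned on the default paths
// above is always safe to resolve.
Compression.Algorithm algo =
    Compression.getCompressionAlgorithmByName(getCompression(args));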
 
Example 28
Source Project: foxtrot   Source File: HbaseTableConnection.java    License: Apache License 2.0
private HTableDescriptor constructHTableDescriptor(final Table table) {
    String tableName = TableUtil.getTableName(hbaseConfig, table);

    HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
    HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(DEFAULT_FAMILY_NAME);
    hColumnDescriptor.setCompressionType(Compression.Algorithm.GZ);
    hColumnDescriptor.setTimeToLive(Math.toIntExact(TimeUnit.DAYS.toSeconds(table.getTtl())));
    hTableDescriptor.addFamily(hColumnDescriptor);
    return hTableDescriptor;
}
 
Example 29
Source Project: hbase   Source File: TestHStore.java    License: Apache License 2.0
@Test
public void testHFileContextSetWithCFAndTable() throws Exception {
  init(this.name.getMethodName());
  StoreFileWriter writer = store.createWriterInTmp(10000L,
      Compression.Algorithm.NONE, false, true, false, true);
  HFileContext hFileContext = writer.getHFileWriter().getFileContext();
  assertArrayEquals(family, hFileContext.getColumnFamily());
  assertArrayEquals(table, hFileContext.getTableName());
}
 
Example 30
Source Project: SparkOnALog   Source File: HBaseCreateTable.java    License: Apache License 2.0
public static void main(String[] args) throws IOException {
	if (args.length < 2) {
		System.out.println("CreateTable {tableName} {columnFamilyName}");
		return;
	}

	String tableName = args[0];
	String columnFamilyName = args[1];

	HBaseAdmin admin = new HBaseAdmin(new Configuration());

	HTableDescriptor tableDescriptor = new HTableDescriptor(); 
	tableDescriptor.setName(Bytes.toBytes(tableName));

	HColumnDescriptor columnDescriptor = new HColumnDescriptor(columnFamilyName);

	columnDescriptor.setCompressionType(Compression.Algorithm.SNAPPY);
	columnDescriptor.setBlocksize(64 * 1024);
	columnDescriptor.setBloomFilterType(BloomType.ROW);

	tableDescriptor.addFamily(columnDescriptor);

	//tableDescriptor.setValue(tableDescriptor.SPLIT_POLICY, ConstantSizeRegionSplitPolicy.class.getName());

	System.out.println("-Creating Table");
	admin.createTable(tableDescriptor);

	admin.close();
	System.out.println("-Done");
}