org.apache.hadoop.hbase.io.compress.Compression Java Examples
The following examples show how to use
org.apache.hadoop.hbase.io.compress.Compression.
The source file, originating project, and license for each example are noted in the header above it.
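Before the individual examples, here is a minimal sketch of the class's core API: looking up an algorithm by its configuration name and borrowing a pooled compressor. The class name CompressionBasicsSketch is illustrative, not part of HBase.

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.io.compress.Compressor;

public class CompressionBasicsSketch {
  public static void main(String[] args) {
    // Look up an algorithm by its configuration name ("none", "gz", "snappy", ...).
    Compression.Algorithm algo = Compression.getCompressionAlgorithmByName("gz");

    // Compressors are pooled; borrow one and return it when finished.
    Compressor compressor = algo.getCompressor();
    try {
      System.out.println("Got compressor for " + algo.getName() + ": " + compressor);
    } finally {
      algo.returnCompressor(compressor);
    }
  }
}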
Example #1
Source File: HBaseUtils.java From flinkDemo with Apache License 2.0 | 6 votes |
/**
 * Creates a table with no pre-split regions.
 *
 * @param tableName table name
 * @param cfs column families
 * @throws Exception Exception
 */
public static void createTable(String tableName, String... cfs) throws Exception {
  Admin admin = null;
  try {
    admin = HBaseUtils.getConnection().getAdmin();
    HTableDescriptor hTableDescriptor = new HTableDescriptor(TableName.valueOf(tableName));
    for (String family : cfs) {
      HColumnDescriptor hColumnDescriptor = new HColumnDescriptor(family);
      hColumnDescriptor.setCompressionType(Compression.Algorithm.SNAPPY);
      hColumnDescriptor.setMaxVersions(3);
      hTableDescriptor.addFamily(hColumnDescriptor);
    }
    admin.createTable(hTableDescriptor);
    LOGGER.info("create table " + tableName + " success.");
  } finally {
    HBaseUtils.closeAdmin(admin);
  }
}
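HTableDescriptor and HColumnDescriptor, used above, belong to the older admin API. A minimal sketch of the same table creation with the builder classes seen in later examples (TableDescriptorBuilder, ColumnFamilyDescriptorBuilder) might look like the following; it assumes an already-open Connection, and the class and method names are illustrative.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableSketch {
  // Hypothetical helper: builds and creates a table whose families use SNAPPY compression.
  public static void createTable(Connection connection, String tableName, String... cfs)
      throws Exception {
    try (Admin admin = connection.getAdmin()) {
      TableDescriptorBuilder tableBuilder =
          TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
      for (String family : cfs) {
        ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder
            .newBuilder(Bytes.toBytes(family))
            .setCompressionType(Compression.Algorithm.SNAPPY)
            .setMaxVersions(3)
            .build();
        tableBuilder.setColumnFamily(cfd);
      }
      admin.createTable(tableBuilder.build());
    }
  }
}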
Example #2
Source File: HStore.java From hbase with Apache License 2.0 | 6 votes |
private HFileContext createFileContext(Compression.Algorithm compression,
    boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
  if (compression == null) {
    compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;
  }
  HFileContext hFileContext = new HFileContextBuilder()
      .withIncludesMvcc(includeMVCCReadpoint)
      .withIncludesTags(includesTag)
      .withCompression(compression)
      .withCompressTags(family.isCompressTags())
      .withChecksumType(checksumType)
      .withBytesPerCheckSum(bytesPerChecksum)
      .withBlockSize(blocksize)
      .withHBaseCheckSum(true)
      .withDataBlockEncoding(family.getDataBlockEncoding())
      .withEncryptionContext(cryptoContext)
      .withCreateTime(EnvironmentEdgeManager.currentTime())
      .withColumnFamily(family.getName())
      .withTableName(region.getTableDescriptor().getTableName().getName())
      .withCellComparator(this.comparator)
      .build();
  return hFileContext;
}
Example #3
Source File: TestHFileOutputFormat2.java From hbase with Apache License 2.0 | 6 votes |
/**
 * @return a map from column family names to compression algorithms for
 *         testing column family compression. Column family names have special characters
 */
private Map<String, Compression.Algorithm> getMockColumnFamiliesForCompression(int numCfs) {
  Map<String, Compression.Algorithm> familyToCompression = new HashMap<>();
  // use column family names having special characters
  if (numCfs-- > 0) {
    familyToCompression.put("Family1!@#!@#&", Compression.Algorithm.LZO);
  }
  if (numCfs-- > 0) {
    familyToCompression.put("Family2=asdads&!AASD", Compression.Algorithm.SNAPPY);
  }
  if (numCfs-- > 0) {
    familyToCompression.put("Family2=asdads&!AASD", Compression.Algorithm.GZ);
  }
  if (numCfs-- > 0) {
    familyToCompression.put("Family3", Compression.Algorithm.NONE);
  }
  return familyToCompression;
}
Example #4
Source File: TestHFileBlock.java From hbase with Apache License 2.0 | 6 votes |
static HFileBlock.Writer createTestV2Block(Compression.Algorithm algo,
    boolean includesMemstoreTS, boolean includesTag) throws IOException {
  final BlockType blockType = BlockType.DATA;
  HFileContext meta = new HFileContextBuilder()
      .withCompression(algo)
      .withIncludesMvcc(includesMemstoreTS)
      .withIncludesTags(includesTag)
      .withBytesPerCheckSum(HFile.DEFAULT_BYTES_PER_CHECKSUM)
      .build();
  HFileBlock.Writer hbw = new HFileBlock.Writer(null, meta);
  DataOutputStream dos = hbw.startWriting(blockType);
  writeTestBlockContents(dos);
  dos.flush();
  hbw.ensureBlockReady();
  assertEquals(1000 * 4, hbw.getUncompressedSizeWithoutHeader());
  hbw.release();
  return hbw;
}
Example #5
Source File: FixedFileTrailer.java From hbase with Apache License 2.0 | 6 votes |
/**
 * Deserialize the file trailer as writable data
 */
void deserializeFromWritable(DataInput input) throws IOException {
  fileInfoOffset = input.readLong();
  loadOnOpenDataOffset = input.readLong();
  dataIndexCount = input.readInt();
  uncompressedDataIndexSize = input.readLong();
  metaIndexCount = input.readInt();
  totalUncompressedBytes = input.readLong();
  entryCount = input.readLong();
  compressionCodec = Compression.Algorithm.values()[input.readInt()];
  numDataIndexLevels = input.readInt();
  firstDataBlockOffset = input.readLong();
  lastDataBlockOffset = input.readLong();
  // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
  // some compat code here.
  setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
      MAX_COMPARATOR_NAME_LENGTH)));
}
Example #6
Source File: TestBlocksScanned.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testBlocksScannedWithEncoding() throws Exception {
  byte[] tableName = Bytes.toBytes("TestBlocksScannedWithEncoding");
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
      new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf(tableName));
  tableDescriptor.setColumnFamily(
      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY)
          .setMaxVersions(10)
          .setBlockCacheEnabled(true)
          .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
          .setBlocksize(BLOCK_SIZE)
          .setCompressionType(Compression.Algorithm.NONE));
  _testBlocksScanned(tableDescriptor);
}
Example #7
Source File: SpliceDefaultCompactor.java From spliceengine with GNU Affero General Public License v3.0 | 6 votes |
/**
 * This is borrowed from DefaultCompactor.
 *
 * @param compression
 * @param includeMVCCReadpoint
 * @param includesTag
 * @param cryptoContext
 * @return
 */
private HFileContext createFileContext(Compression.Algorithm compression,
    boolean includeMVCCReadpoint, boolean includesTag, Encryption.Context cryptoContext) {
  if (compression == null) {
    compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;
  }
  HFileContext hFileContext = new HFileContextBuilder()
      .withIncludesMvcc(includeMVCCReadpoint)
      .withIncludesTags(includesTag)
      .withCompression(compression)
      .withCompressTags(store.getColumnFamilyDescriptor().isCompressTags())
      .withChecksumType(HStore.getChecksumType(conf))
      .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
      .withBlockSize(store.getColumnFamilyDescriptor().getBlocksize())
      .withHBaseCheckSum(true)
      .withDataBlockEncoding(store.getColumnFamilyDescriptor().getDataBlockEncoding())
      .withEncryptionContext(cryptoContext)
      .withCreateTime(EnvironmentEdgeManager.currentTime())
      .build();
  return hFileContext;
}
Example #8
Source File: CompressionTest.java From hbase with Apache License 2.0 | 6 votes |
public static void testCompression(Compression.Algorithm algo) throws IOException {
  if (compressionTestResults[algo.ordinal()] != null) {
    if (compressionTestResults[algo.ordinal()]) {
      return; // already passed test, don't do it again.
    } else {
      // failed.
      throw new DoNotRetryIOException("Compression algorithm '" + algo.getName() + "'"
          + " previously failed test.");
    }
  }
  try {
    Compressor c = algo.getCompressor();
    algo.returnCompressor(c);
    compressionTestResults[algo.ordinal()] = true; // passes
  } catch (Throwable t) {
    compressionTestResults[algo.ordinal()] = false; // failure
    throw new DoNotRetryIOException(t);
  }
}
Example #9
Source File: CompressionTest.java From hbase with Apache License 2.0 | 6 votes |
public static boolean testCompression(String codec) {
  codec = codec.toLowerCase(Locale.ROOT);
  Compression.Algorithm a;

  try {
    a = Compression.getCompressionAlgorithmByName(codec);
  } catch (IllegalArgumentException e) {
    LOG.warn("Codec type: " + codec + " is not known");
    return false;
  }

  try {
    testCompression(a);
    return true;
  } catch (IOException ignored) {
    LOG.warn("Can't instantiate codec: " + codec, ignored);
    return false;
  }
}
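Taken together, the two CompressionTest methods above make it easy to verify a codec before configuring it on a table. A hypothetical helper that falls back to NONE when the requested codec is unavailable might look like this (class and method names are illustrative):

import java.util.Locale;

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.util.CompressionTest;

public class CompressionCheckSketch {
  // Returns the requested codec if it passes CompressionTest, otherwise falls back to NONE.
  static Compression.Algorithm pickCodec(String requested) {
    if (CompressionTest.testCompression(requested)) {
      return Compression.getCompressionAlgorithmByName(requested.toLowerCase(Locale.ROOT));
    }
    return Compression.Algorithm.NONE;
  }

  public static void main(String[] args) {
    System.out.println("Using codec: " + pickCodec("snappy"));
  }
}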
Example #10
Source File: TestScanWithBloomError.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testThreeStoreFiles() throws IOException {
  ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder
      .newBuilder(Bytes.toBytes(FAMILY))
      .setCompressionType(Compression.Algorithm.GZ)
      .setBloomFilterType(bloomType)
      .setMaxVersions(TestMultiColumnScanner.MAX_VERSIONS)
      .build();
  region = TEST_UTIL.createTestRegion(TABLE_NAME, columnFamilyDescriptor);
  createStoreFile(new int[] {1, 2, 6});
  createStoreFile(new int[] {1, 2, 3, 7});
  createStoreFile(new int[] {1, 9});
  scanColSet(new int[] {1, 4, 6, 7}, new int[] {1, 6, 7});
  HBaseTestingUtility.closeRegionAndWAL(region);
}
Example #11
Source File: ThriftUtilities.java From hbase with Apache License 2.0 | 6 votes |
/**
 * This utility method creates a new Hbase HColumnDescriptor object based on a
 * Thrift ColumnDescriptor "struct".
 *
 * @param in Thrift ColumnDescriptor object
 * @return ModifyableColumnFamilyDescriptor
 * @throws IllegalArgument if the column name is empty
 */
static public ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor colDescFromThrift(
    ColumnDescriptor in) throws IllegalArgument {
  Compression.Algorithm comp =
      Compression.getCompressionAlgorithmByName(in.compression.toLowerCase(Locale.ROOT));
  BloomType bt = BloomType.valueOf(in.bloomFilterType);

  if (in.name == null || !in.name.hasRemaining()) {
    throw new IllegalArgument("column name is empty");
  }
  byte[] parsedName = CellUtil.parseColumn(Bytes.getBytes(in.name))[0];
  ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor familyDescriptor =
      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(parsedName)
          .setMaxVersions(in.maxVersions)
          .setCompressionType(comp)
          .setInMemory(in.inMemory)
          .setBlockCacheEnabled(in.blockCacheEnabled)
          .setTimeToLive(in.timeToLive > 0 ? in.timeToLive : Integer.MAX_VALUE)
          .setBloomFilterType(bt);
  return familyDescriptor;
}
Example #12
Source File: TestHFile.java From hbase with Apache License 2.0 | 6 votes |
@Test
public void testNullMetaBlocks() throws Exception {
  for (Compression.Algorithm compressAlgo : HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
    Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
    FSDataOutputStream fout = createFSOutput(mFile);
    HFileContext meta = new HFileContextBuilder()
        .withCompression(compressAlgo)
        .withBlockSize(minBlockSize)
        .build();
    Writer writer = HFile.getWriterFactory(conf, cacheConf)
        .withOutputStream(fout)
        .withFileContext(meta)
        .create();
    KeyValue kv =
        new KeyValue(Bytes.toBytes("foo"), Bytes.toBytes("f1"), null, Bytes.toBytes("value"));
    writer.append(kv);
    writer.close();
    fout.close();
    Reader reader = HFile.createReader(fs, mFile, cacheConf, true, conf);
    assertNull(reader.getMetaBlock("non-existant", false));
  }
}
Example #13
Source File: HBaseTestingUtility.java From hbase with Apache License 2.0 | 6 votes |
/**
 * Create a set of column descriptors with the combination of compression,
 * encoding, bloom codecs available.
 * @param prefix family names prefix
 * @return the list of column descriptors
 */
public static List<ColumnFamilyDescriptor> generateColumnDescriptors(final String prefix) {
  List<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>();
  long familyId = 0;
  for (Compression.Algorithm compressionType : getSupportedCompressionAlgorithms()) {
    for (DataBlockEncoding encodingType : DataBlockEncoding.values()) {
      for (BloomType bloomType : BloomType.values()) {
        String name = String.format("%s-cf-!@#&-%d!@#", prefix, familyId);
        ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
            ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(name));
        columnFamilyDescriptorBuilder.setCompressionType(compressionType);
        columnFamilyDescriptorBuilder.setDataBlockEncoding(encodingType);
        columnFamilyDescriptorBuilder.setBloomFilterType(bloomType);
        columnFamilyDescriptors.add(columnFamilyDescriptorBuilder.build());
        familyId++;
      }
    }
  }
  return columnFamilyDescriptors;
}
Example #14
Source File: TestSeekToBlockWithEncoders.java From hbase with Apache License 2.0 | 6 votes |
private void seekToTheKey(KeyValue expected, List<KeyValue> kvs, Cell toSeek) throws IOException {
  // create all seekers
  List<DataBlockEncoder.EncodedSeeker> encodedSeekers = new ArrayList<>();
  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(false)
        .withIncludesMvcc(false)
        .withIncludesTags(false)
        .withCompression(Compression.Algorithm.NONE)
        .build();
    HFileBlockEncodingContext encodingContext =
        encoder.newDataBlockEncodingContext(encoding, HFILEBLOCK_DUMMY_HEADER, meta);
    ByteBuffer encodedBuffer =
        TestDataBlockEncoders.encodeKeyValues(encoding, kvs, encodingContext, this.useOffheapData);
    DataBlockEncoder.EncodedSeeker seeker =
        encoder.createSeeker(encoder.newDataBlockDecodingContext(meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    encodedSeekers.add(seeker);
  }
  // test it!
  // try a few random seeks
  checkSeekingConsistency(encodedSeekers, toSeek, expected);
}
Example #15
Source File: HFileContext.java From hbase with Apache License 2.0 | 6 votes |
HFileContext(boolean useHBaseChecksum, boolean includesMvcc, boolean includesTags,
    Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
    int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
    Encryption.Context cryptoContext, long fileCreateTime, String hfileName,
    byte[] columnFamily, byte[] tableName, CellComparator cellComparator) {
  this.usesHBaseChecksum = useHBaseChecksum;
  this.includesMvcc = includesMvcc;
  this.includesTags = includesTags;
  this.compressAlgo = compressAlgo;
  this.compressTags = compressTags;
  this.checksumType = checksumType;
  this.bytesPerChecksum = bytesPerChecksum;
  this.blocksize = blockSize;
  if (encoding != null) {
    this.encoding = encoding;
  }
  this.cryptoContext = cryptoContext;
  this.fileCreateTime = fileCreateTime;
  this.hfileName = hfileName;
  this.columnFamily = columnFamily;
  this.tableName = tableName;
  // If no cellComparator specified, make a guess based off tablename. If hbase:meta, then should
  // be the meta table comparator. Comparators are per table.
  this.cellComparator = cellComparator != null ? cellComparator
      : this.tableName != null ? CellComparatorImpl.getCellComparator(this.tableName)
          : CellComparator.getInstance();
}
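Application code rarely calls this package-private constructor directly; it normally goes through HFileContextBuilder, as most of the examples on this page do. A minimal standalone sketch with arbitrary values:

import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;

public class ContextSketch {
  public static void main(String[] args) {
    // Only a few fields are set here; the rest keep the builder defaults.
    HFileContext context = new HFileContextBuilder()
        .withCompression(Compression.Algorithm.GZ)
        .withBlockSize(64 * 1024)
        .withHBaseCheckSum(true)
        .build();
    System.out.println(context);
  }
}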
Example #16
Source File: HBaseBasedAuditRepository.java From incubator-atlas with Apache License 2.0 | 6 votes |
private void createTableIfNotExists() throws AtlasException {
  Admin admin = null;
  try {
    admin = connection.getAdmin();
    LOG.info("Checking if table {} exists", tableName.getNameAsString());
    if (!admin.tableExists(tableName)) {
      LOG.info("Creating table {}", tableName.getNameAsString());
      HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
      HColumnDescriptor columnFamily = new HColumnDescriptor(COLUMN_FAMILY);
      columnFamily.setMaxVersions(1);
      columnFamily.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
      columnFamily.setCompressionType(Compression.Algorithm.GZ);
      columnFamily.setBloomFilterType(BloomType.ROW);
      tableDescriptor.addFamily(columnFamily);
      admin.createTable(tableDescriptor);
    } else {
      LOG.info("Table {} exists", tableName.getNameAsString());
    }
  } catch (IOException e) {
    throw new AtlasException(e);
  } finally {
    close(admin);
  }
}
Example #17
Source File: TestDataBlockEncoders.java From hbase with Apache License 2.0 | 6 votes |
/**
 * Test whether the decompression of first key is implemented correctly.
 * @throws IOException
 */
@Test
public void testFirstKeyInBlockOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
        getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData);
    Cell key = encoder.getFirstKeyCellInBlock(new SingleByteBuff(encodedBuffer));
    KeyValue firstKv = sampleKv.get(0);
    if (0 != PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR, key, firstKv)) {
      int commonPrefix = PrivateCellUtil.findCommonPrefixInFlatKey(key, firstKv, false, true);
      fail(String.format("Bug in '%s' commonPrefix %d", encoder.toString(), commonPrefix));
    }
  }
}
Example #18
Source File: TestHFileOutputFormat2.java From hbase with Apache License 2.0 | 6 votes |
private void setupMockColumnFamiliesForCompression(Table table,
    Map<String, Compression.Algorithm> familyToCompression) throws IOException {
  TableDescriptorBuilder mockTableDescriptor = TableDescriptorBuilder.newBuilder(TABLE_NAMES[0]);
  for (Entry<String, Compression.Algorithm> entry : familyToCompression.entrySet()) {
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes(entry.getKey()))
        .setMaxVersions(1)
        .setCompressionType(entry.getValue())
        .setBlockCacheEnabled(false)
        .setTimeToLive(0)
        .build();
    mockTableDescriptor.setColumnFamily(columnFamilyDescriptor);
  }
  Mockito.doReturn(mockTableDescriptor.build()).when(table).getDescriptor();
}
Example #19
Source File: HBaseBasedAuditRepository.java From atlas with Apache License 2.0 | 6 votes |
private void createTableIfNotExists() throws AtlasException {
  Admin admin = null;
  try {
    admin = connection.getAdmin();
    LOG.info("Checking if table {} exists", tableName.getNameAsString());
    if (!admin.tableExists(tableName)) {
      LOG.info("Creating table {}", tableName.getNameAsString());
      HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
      HColumnDescriptor columnFamily = new HColumnDescriptor(COLUMN_FAMILY);
      columnFamily.setMaxVersions(1);
      columnFamily.setDataBlockEncoding(DataBlockEncoding.FAST_DIFF);
      columnFamily.setCompressionType(Compression.Algorithm.GZ);
      columnFamily.setBloomFilterType(BloomType.ROW);
      tableDescriptor.addFamily(columnFamily);
      admin.createTable(tableDescriptor);
    } else {
      LOG.info("Table {} exists", tableName.getNameAsString());
    }
  } catch (IOException e) {
    throw new AtlasException(e);
  } finally {
    close(admin);
  }
}
Example #20
Source File: HBaseConnectionFactory.java From spliceengine with GNU Affero General Public License v3.0 | 5 votes |
public HTableDescriptor generateTransactionTable() {
  HTableDescriptor desc = new HTableDescriptor(
      TableName.valueOf(namespaceBytes, HConfiguration.TRANSACTION_TABLE_BYTES));
  HColumnDescriptor columnDescriptor = new HColumnDescriptor(DEFAULT_FAMILY_BYTES);
  columnDescriptor.setMaxVersions(5);
  Compression.Algorithm compress =
      Compression.getCompressionAlgorithmByName(config.getCompressionAlgorithm());
  columnDescriptor.setCompressionType(compress);
  columnDescriptor.setInMemory(HConfiguration.DEFAULT_IN_MEMORY);
  columnDescriptor.setBlockCacheEnabled(HConfiguration.DEFAULT_BLOCKCACHE);
  columnDescriptor.setBloomFilterType(
      BloomType.valueOf(HConfiguration.DEFAULT_BLOOMFILTER.toUpperCase()));
  columnDescriptor.setTimeToLive(HConfiguration.DEFAULT_TTL);
  desc.addFamily(columnDescriptor);
  desc.addFamily(new HColumnDescriptor(Bytes.toBytes(SI_PERMISSION_FAMILY)));
  return desc;
}
Example #21
Source File: TestSecureBulkLoadManager.java From hbase with Apache License 2.0 | 5 votes |
private void prepareHFile(Path dir, byte[] key, byte[] value) throws Exception {
  TableDescriptor desc = testUtil.getAdmin().getDescriptor(TABLE);
  ColumnFamilyDescriptor family = desc.getColumnFamily(FAMILY);
  Compression.Algorithm compression = HFile.DEFAULT_COMPRESSION_ALGORITHM;

  CacheConfig writerCacheConf = new CacheConfig(conf, family, null, ByteBuffAllocator.HEAP);
  writerCacheConf.setCacheDataOnWrite(false);
  HFileContext hFileContext = new HFileContextBuilder()
      .withIncludesMvcc(false)
      .withIncludesTags(true)
      .withCompression(compression)
      .withCompressTags(family.isCompressTags())
      .withChecksumType(HStore.getChecksumType(conf))
      .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
      .withBlockSize(family.getBlocksize())
      .withHBaseCheckSum(true)
      .withDataBlockEncoding(family.getDataBlockEncoding())
      .withEncryptionContext(Encryption.Context.NONE)
      .withCreateTime(EnvironmentEdgeManager.currentTime())
      .build();
  StoreFileWriter.Builder builder =
      new StoreFileWriter.Builder(conf, writerCacheConf, dir.getFileSystem(conf))
          .withOutputDir(new Path(dir, family.getNameAsString()))
          .withBloomType(family.getBloomFilterType())
          .withMaxKeyCount(Integer.MAX_VALUE)
          .withFileContext(hFileContext);
  StoreFileWriter writer = builder.build();

  Put put = new Put(key);
  put.addColumn(FAMILY, COLUMN, value);
  for (Cell c : put.get(FAMILY, COLUMN)) {
    writer.append(c);
  }
  writer.close();
}
Example #22
Source File: TestHFile.java From hbase with Apache License 2.0 | 5 votes |
/**
 * Make sure the ordinals for our compression algorithms do not change on us.
 */
@Test
public void testCompressionOrdinance() {
  assertTrue(Compression.Algorithm.LZO.ordinal() == 0);
  assertTrue(Compression.Algorithm.GZ.ordinal() == 1);
  assertTrue(Compression.Algorithm.NONE.ordinal() == 2);
  assertTrue(Compression.Algorithm.SNAPPY.ordinal() == 3);
  assertTrue(Compression.Algorithm.LZ4.ordinal() == 4);
}
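The ordinals are pinned because FixedFileTrailer (Example #5) persists the compression codec as a raw enum ordinal, so reordering the enum would misread existing HFiles. A small sketch of that round trip, assuming the ordering verified above (the class name is illustrative):

import org.apache.hadoop.hbase.io.compress.Compression;

public class OrdinalRoundTripSketch {
  public static void main(String[] args) {
    // A trailer written with GZ stores the int 1; reading maps the ordinal back to the enum.
    int stored = Compression.Algorithm.GZ.ordinal();
    Compression.Algorithm restored = Compression.Algorithm.values()[stored];
    System.out.println(stored + " -> " + restored); // prints "1 -> GZ"
  }
}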
Example #23
Source File: TestDataBlockEncoders.java From hbase with Apache License 2.0 | 5 votes |
@Test
public void testNextOnSample() throws IOException {
  List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

  for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
    if (encoding.getEncoder() == null) {
      continue;
    }
    DataBlockEncoder encoder = encoding.getEncoder();
    ByteBuffer encodedBuffer = encodeKeyValues(encoding, sampleKv,
        getEncodingContext(Compression.Algorithm.NONE, encoding), this.useOffheapData);
    HFileContext meta = new HFileContextBuilder()
        .withHBaseCheckSum(false)
        .withIncludesMvcc(includesMemstoreTS)
        .withIncludesTags(includesTags)
        .withCompression(Compression.Algorithm.NONE)
        .build();
    DataBlockEncoder.EncodedSeeker seeker =
        encoder.createSeeker(encoder.newDataBlockDecodingContext(meta));
    seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
    int i = 0;
    do {
      KeyValue expectedKeyValue = sampleKv.get(i);
      Cell cell = seeker.getCell();
      if (PrivateCellUtil.compareKeyIgnoresMvcc(CellComparatorImpl.COMPARATOR,
          expectedKeyValue, cell) != 0) {
        int commonPrefix =
            PrivateCellUtil.findCommonPrefixInFlatKey(expectedKeyValue, cell, false, true);
        fail(String.format("next() produces wrong results "
            + "encoder: %s i: %d commonPrefix: %d" + "\n expected %s\n actual %s",
            encoder.toString(), i, commonPrefix,
            Bytes.toStringBinary(expectedKeyValue.getBuffer(),
                expectedKeyValue.getKeyOffset(), expectedKeyValue.getKeyLength()),
            CellUtil.toString(cell, false)));
      }
      i++;
    } while (seeker.next());
  }
}
Example #24
Source File: TestHStore.java From hbase with Apache License 2.0 | 5 votes |
@Test
public void testHFileContextSetWithCFAndTable() throws Exception {
  init(this.name.getMethodName());
  StoreFileWriter writer =
      store.createWriterInTmp(10000L, Compression.Algorithm.NONE, false, true, false, true);
  HFileContext hFileContext = writer.getHFileWriter().getFileContext();
  assertArrayEquals(family, hFileContext.getColumnFamily());
  assertArrayEquals(table, hFileContext.getTableName());
}
Example #25
Source File: TestDataBlockEncodingTool.java From hbase with Apache License 2.0 | 5 votes |
private static void testDataBlockingTool(Path path) throws IOException {
  Configuration conf = HBaseConfiguration.create();
  int maxKV = Integer.MAX_VALUE;
  boolean doVerify = true;
  boolean doBenchmark = true;
  String testHFilePath = path.toString();
  DataBlockEncodingTool.testCodecs(conf, maxKV, testHFilePath,
      Compression.Algorithm.GZ.getName(), doBenchmark, doVerify);
}
Example #26
Source File: HTableOperatorImpl.java From PoseidonX with Apache License 2.0 | 5 votes |
private HColumnDescriptor changeCd(ColumnDescriptor cd) {
  HColumnDescriptor family = new HColumnDescriptor(cd.getFamilyName());
  if (cd.isCompress()) {
    family.setCompactionCompressionType(Compression.Algorithm.GZ);
  }
  return family;
}
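Note that setCompactionCompressionType is narrower than the setCompressionType used in the other examples: it applies to store files written by compactions, while setCompressionType sets the family's general storage compression. A short sketch setting both on one descriptor (the family name "cf" and class name are arbitrary):

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.io.compress.Compression;

public class FamilyCompressionSketch {
  public static void main(String[] args) {
    HColumnDescriptor family = new HColumnDescriptor("cf");
    // Codec for regular store files written by flushes.
    family.setCompressionType(Compression.Algorithm.GZ);
    // Codec for store files written by compactions.
    family.setCompactionCompressionType(Compression.Algorithm.GZ);
    System.out.println(family);
  }
}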
Example #27
Source File: TestMultiColumnScanner.java From hbase with Apache License 2.0 | 5 votes |
public static Collection<Object[]> generateParams(Compression.Algorithm algo,
    boolean useDataBlockEncoding) {
  List<Object[]> parameters = new ArrayList<>();
  for (BloomType bloomType : BloomType.values()) {
    DataBlockEncoding dataBlockEncoding =
        useDataBlockEncoding ? DataBlockEncoding.PREFIX : DataBlockEncoding.NONE;
    parameters.add(new Object[] { algo, bloomType, dataBlockEncoding });
  }
  return parameters;
}
Example #28
Source File: HBaseCreateTable.java From SparkOnALog with Apache License 2.0 | 5 votes |
public static void main(String[] args) throws IOException {
  if (args.length < 2) {
    System.out.println("CreateTable {tableName} {columnFamilyName}");
    return;
  }

  String tableName = args[0];
  String columnFamilyName = args[1];

  HBaseAdmin admin = new HBaseAdmin(new Configuration());

  HTableDescriptor tableDescriptor = new HTableDescriptor();
  tableDescriptor.setName(Bytes.toBytes(tableName));

  HColumnDescriptor columnDescriptor = new HColumnDescriptor(columnFamilyName);
  columnDescriptor.setCompressionType(Compression.Algorithm.SNAPPY);
  columnDescriptor.setBlocksize(64 * 1024);
  columnDescriptor.setBloomFilterType(BloomType.ROW);

  tableDescriptor.addFamily(columnDescriptor);
  //tableDescriptor.setValue(tableDescriptor.SPLIT_POLICY, ConstantSizeRegionSplitPolicy.class.getName());

  System.out.println("-Creating Table");
  admin.createTable(tableDescriptor);
  admin.close();
  System.out.println("-Done");
}
Example #29
Source File: HbaseSchemaCommandManager.java From pinpoint with Apache License 2.0 | 5 votes |
private Compression.Algorithm getCompressionAlgorithm(String compression) {
  if (StringUtils.isEmpty(compression)) {
    return Compression.Algorithm.NONE;
  }
  for (Compression.Algorithm compressionAlgorithm : Compression.Algorithm.values()) {
    if (compressionAlgorithm.getName().equalsIgnoreCase(compression)) {
      return compressionAlgorithm;
    }
  }
  throw new IllegalArgumentException("Unknown compression option : " + compression);
}
Example #30
Source File: TestBlocksScanned.java From hbase with Apache License 2.0 | 5 votes |
@Test
public void testBlocksScanned() throws Exception {
  byte[] tableName = Bytes.toBytes("TestBlocksScanned");
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
      new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf(tableName));
  tableDescriptor.setColumnFamily(
      new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY)
          .setMaxVersions(10)
          .setBlockCacheEnabled(true)
          .setBlocksize(BLOCK_SIZE)
          .setCompressionType(Compression.Algorithm.NONE));
  _testBlocksScanned(tableDescriptor);
}