Java Code Examples for org.apache.hadoop.hbase.HTableDescriptor#getFamiliesKeys()

The following examples show how to use org.apache.hadoop.hbase.HTableDescriptor#getFamiliesKeys(). Each example is taken from an open-source project; the source file and license are noted above the code.
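For orientation, getFamiliesKeys() returns the column family names declared on the descriptor as a Set<byte[]>, one entry per family. Below is a minimal, hypothetical sketch of that contract; the class and method names are illustrative and not taken from any of the projects listed here.

import java.util.Set;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyKeysSketch {
    public static void printFamilyNames(HTableDescriptor desc) {
        // Each key is the raw byte[] name of one column family on the table.
        Set<byte[]> familyKeys = desc.getFamiliesKeys();
        for (byte[] key : familyKeys) {
            System.out.println(Bytes.toString(key));
        }
    }
}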
Example 1
Source File: HBaseConnection.java    From kylin-on-parquet-v2 with Apache License 2.0
private static Set<String> getFamilyNames(HTableDescriptor desc) {
    HashSet<String> result = Sets.newHashSet();
    // Decode each column family key (a raw byte[]) into a UTF-8 string.
    for (byte[] bytes : desc.getFamiliesKeys()) {
        try {
            result.add(new String(bytes, "UTF-8"));
        } catch (UnsupportedEncodingException e) {
            logger.error(e.toString());
        }
    }
    return result;
}
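A note on the exception handling above: decoding with java.nio.charset.StandardCharsets.UTF_8 instead of the charset name "UTF-8" avoids the checked UnsupportedEncodingException entirely. A hypothetical variant of the same helper (not taken from the kylin sources) could look like this:

import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

import org.apache.hadoop.hbase.HTableDescriptor;

class FamilyNameUtil {
    static Set<String> getFamilyNames(HTableDescriptor desc) {
        Set<String> result = new HashSet<>();
        for (byte[] bytes : desc.getFamiliesKeys()) {
            // StandardCharsets.UTF_8 is always available, so there is no checked exception to handle.
            result.add(new String(bytes, StandardCharsets.UTF_8));
        }
        return result;
    }
}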
 
Example 2
Source File: AbstractTestWALReplay.java    From hbase with Apache License 2.0
/**
 * testcase for https://issues.apache.org/jira/browse/HBASE-14949.
 */
private void testNameConflictWhenSplit(boolean largeFirst) throws IOException,
    StreamLacksCapabilityException {
  final TableName tableName = TableName.valueOf("testReplayEditsWrittenIntoWAL");
  final MultiVersionConcurrencyControl mvcc = new MultiVersionConcurrencyControl();
  final RegionInfo hri = createBasic3FamilyHRegionInfo(tableName);
  final Path basedir = CommonFSUtils.getTableDir(hbaseRootDir, tableName);
  deleteDir(basedir);

  final HTableDescriptor htd = createBasic1FamilyHTD(tableName);
  // Build a replication scope map keyed by each column family returned by getFamiliesKeys();
  // scope 0 means the family is not replicated.
  NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  for (byte[] fam : htd.getFamiliesKeys()) {
    scopes.put(fam, 0);
  }
  HRegion region = HBaseTestingUtility.createRegionAndWAL(hri, hbaseRootDir, this.conf, htd);
  HBaseTestingUtility.closeRegionAndWAL(region);
  final byte[] family = htd.getColumnFamilies()[0].getName();
  final byte[] rowName = tableName.getName();
  // Two edits against the same row and family, with increasing sequence ids.
  FSWALEntry entry1 = createFSWALEntry(htd, hri, 1L, rowName, family, ee, mvcc, 1, scopes);
  FSWALEntry entry2 = createFSWALEntry(htd, hri, 2L, rowName, family, ee, mvcc, 2, scopes);

  // wal-1 holds both edits, wal-2 only the second; largeFirst controls which file is split first.
  Path largeFile = new Path(logDir, "wal-1");
  Path smallFile = new Path(logDir, "wal-2");
  writerWALFile(largeFile, Arrays.asList(entry1, entry2));
  writerWALFile(smallFile, Arrays.asList(entry2));
  FileStatus first, second;
  if (largeFirst) {
    first = fs.getFileStatus(largeFile);
    second = fs.getFileStatus(smallFile);
  } else {
    first = fs.getFileStatus(smallFile);
    second = fs.getFileStatus(largeFile);
  }
  WALSplitter.splitLogFile(hbaseRootDir, first, fs, conf, null, null, null, wals, null);
  WALSplitter.splitLogFile(hbaseRootDir, second, fs, conf, null, null, null, wals, null);
  // Reopen the region so the split edits are replayed, then verify that both edits are visible.
  WAL wal = createWAL(this.conf, hbaseRootDir, logName);
  region = HRegion.openHRegion(conf, this.fs, hbaseRootDir, hri, htd, wal);
  assertTrue(region.getOpenSeqNum() > mvcc.getWritePoint());
  assertEquals(2, region.get(new Get(rowName)).size());
}
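HTableDescriptor (and with it getFamiliesKeys()) is deprecated in HBase 2.x; the corresponding accessor on the TableDescriptor interface is getColumnFamilyNames(), which likewise returns a Set<byte[]>. The following is a hedged sketch of the scopes-map pattern from the test above, rewritten against that interface; the table and family names here are made up for illustration.

import java.util.NavigableMap;
import java.util.TreeMap;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class ReplicationScopeSketch {
    // Same pattern as the test: one scope entry (0 = not replicated) per column family name.
    public static NavigableMap<byte[], Integer> localScopes(TableDescriptor td) {
        NavigableMap<byte[], Integer> scopes = new TreeMap<>(Bytes.BYTES_COMPARATOR);
        for (byte[] family : td.getColumnFamilyNames()) {
            scopes.put(family, 0);
        }
        return scopes;
    }

    public static void main(String[] args) {
        // Illustrative descriptor with a single family "cf".
        TableDescriptor td = TableDescriptorBuilder
            .newBuilder(TableName.valueOf("example_table"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
            .build();
        System.out.println("scope entries: " + localScopes(td).size());
    }
}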