Java Code Examples for org.apache.hadoop.hbase.HTableDescriptor#getFamily()

The following examples show how to use org.apache.hadoop.hbase.HTableDescriptor#getFamily(). The method takes a column family name as a byte array and returns the matching HColumnDescriptor, or null if the table descriptor contains no family with that name. Each example notes the project and source file it was taken from.
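Before the project-specific examples, here is a minimal, self-contained sketch of the call in isolation. It assumes the pre-2.0 HBase client API (HTableDescriptor, HColumnDescriptor, Admin#getTableDescriptor); the table name "example_table" and family name "cf1" are hypothetical placeholders, not names used by any of the projects below.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class GetFamilyExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            // Hypothetical table and column family names, used only for illustration.
            HTableDescriptor desc = admin.getTableDescriptor(TableName.valueOf("example_table"));
            HColumnDescriptor cf = desc.getFamily(Bytes.toBytes("cf1"));
            if (cf != null) {
                // getFamily() hands back the per-family descriptor, so its
                // settings can be inspected (or modified and written back).
                System.out.println("max versions: " + cf.getMaxVersions());
                System.out.println("TTL (seconds): " + cf.getTimeToLive());
            } else {
                System.out.println("No column family named cf1 on this table.");
            }
        }
    }
}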
Example 1
Source File: HBaseShims.java    From phoenix-omid with Apache License 2.0
public static boolean OmidCompactionEnabled(ObserverContext<RegionCoprocessorEnvironment> env,
                              Store store,
                              String cfFlagValue) {
    HTableDescriptor desc = env.getEnvironment().getRegion().getTableDesc();
    HColumnDescriptor famDesc
            = desc.getFamily(Bytes.toBytes(store.getColumnFamilyName()));
    return Boolean.valueOf(famDesc.getValue(cfFlagValue));
}
 
Example 2
Source File: HBaseShims.java    From phoenix-omid with Apache License 2.0
public static void setCompaction(Connection conn, TableName table, byte[] columnFamily, String key, String value)
        throws IOException {
    try(Admin admin = conn.getAdmin()) {
        HTableDescriptor desc = admin.getTableDescriptor(table);
        HColumnDescriptor cfDesc = desc.getFamily(columnFamily);
        cfDesc.setValue(key, value);
        admin.modifyColumn(table, cfDesc);
    }
}
 
Example 3
Source File: TransactionProcessorTest.java    From phoenix-tephra with Apache License 2.0
private HRegion updateTtl(HRegion region, byte[] family, long ttl) throws Exception {
  region.close();
  HTableDescriptor htd = region.getTableDesc();
  HColumnDescriptor cfd = htd.getFamily(family);
  if (ttl > 0) {
    cfd.setValue(TxConstants.PROPERTY_TTL, String.valueOf(ttl));
  }
  cfd.setMaxVersions(10);
  return HRegion.openHRegion(region.getRegionInfo(), htd, region.getWAL(), conf,
                             new LocalRegionServerServices(conf, ServerName.valueOf(
                               InetAddress.getLocalHost().getHostName(), 0, System.currentTimeMillis())), null);
}
 
Example 4
Source File: CreateNewHbase.java    From learning-hadoop with Apache License 2.0
public static void createTable(String tabName) throws Exception {
	HBaseAdmin admin = new HBaseAdmin(conf);
	if (admin.tableExists(tabName)) {
		System.out.println(tabName + " exists!");
		admin.close();
		return;
	}
	HTableDescriptor table = new HTableDescriptor(tabName);
	table.addFamily(new HColumnDescriptor("f1"));
	table.addFamily(new HColumnDescriptor("f2"));
	table.addFamily(new HColumnDescriptor("f3"));
	// Look up the descriptor for family "f1"; the return value is unused in this example.
	table.getFamily(Bytes.toBytes("f1"));
	admin.createTable(table);
	admin.close();
}
 
Example 5
Source File: TestTableDescriptorModificationFromClient.java    From hbase with Apache License 2.0
@Test
public void testModifyColumnFamily() throws IOException {
  Admin admin = TEST_UTIL.getAdmin();

  ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor cfDescriptor =
    new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(FAMILY_0);
  int blockSize = cfDescriptor.getBlocksize();
  // Create a table with one family
  TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
    new TableDescriptorBuilder.ModifyableTableDescriptor(TABLE_NAME);

  tableDescriptor.setColumnFamily(cfDescriptor);
  admin.createTable(tableDescriptor);
  admin.disableTable(TABLE_NAME);
  try {
    // Verify the table descriptor
    verifyTableDescriptor(TABLE_NAME, FAMILY_0);

    int newBlockSize = 2 * blockSize;
    cfDescriptor.setBlocksize(newBlockSize);

    // Modify column family
    admin.modifyColumnFamily(TABLE_NAME, cfDescriptor);

    HTableDescriptor htd = new HTableDescriptor(admin.getDescriptor(TABLE_NAME));
    HColumnDescriptor hcfd = htd.getFamily(FAMILY_0);
    assertTrue(hcfd.getBlocksize() == newBlockSize);
  } finally {
    admin.deleteTable(TABLE_NAME);
  }
}
 
Example 6
Source File: AlterTableTest.java    From phoenix with BSD 3-Clause "New" or "Revised" License
@Test
public void testAlterColumnFamilyProperty() throws Exception {

    Properties props = new Properties(TEST_PROPERTIES);
    Connection conn = DriverManager.getConnection(getUrl(), props);
    
    String ddl = "CREATE TABLE test_table " +
            "  (a_string varchar not null, col1 integer" +
            "  CONSTRAINT pk PRIMARY KEY (a_string))\n";
    try {
            conn.createStatement().execute(ddl);
          
            conn.createStatement().execute("ALTER TABLE TEST_TABLE ADD col2 integer IN_MEMORY=true");
            
            HTableInterface htable1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getTable(Bytes.toBytes("TEST_TABLE")); 
            HTableDescriptor htableDesciptor1 = htable1.getTableDescriptor();
            HColumnDescriptor hcolumnDescriptor1 = htableDesciptor1.getFamily(Bytes.toBytes("_0"));
            assertTrue(hcolumnDescriptor1.isInMemory());
           
            try {
                
                conn.createStatement().execute("ALTER TABLE TEST_TABLE SET IN_MEMORY=false");
                fail("Should have caught exception.");
                
            } catch (SQLException e) {
                assertTrue(e.getMessage(), e.getMessage().contains("ERROR 1025 (42Y84): Unsupported property set in ALTER TABLE command."));
            } 
    } finally {
        conn.close();
    }
 }
 
Example 7
Source File: QueryDatabaseMetaDataIT.java    From phoenix with Apache License 2.0
@Test
public void testCreateOnExistingTable() throws Exception {
    PhoenixConnection pconn = DriverManager.getConnection(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES)).unwrap(PhoenixConnection.class);
    String tableName = MDTEST_NAME;
    String schemaName = MDTEST_SCHEMA_NAME;
    byte[] cfA = Bytes.toBytes(SchemaUtil.normalizeIdentifier("a"));
    byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
    byte[] cfC = Bytes.toBytes("c");
    byte[][] familyNames = new byte[][] {cfB, cfC};
    byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
    HBaseAdmin admin = pconn.getQueryServices().getAdmin();
    try {
        admin.disableTable(htableName);
        admin.deleteTable(htableName);
        admin.enableTable(htableName);
    } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
        // Ignore: the table may not exist yet.
    }
    
    @SuppressWarnings("deprecation")
    HTableDescriptor descriptor = new HTableDescriptor(htableName);
    for (byte[] familyName : familyNames) {
        HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
        descriptor.addFamily(columnDescriptor);
    }
    admin.createTable(descriptor);
        
    long ts = nextTimestamp();
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    PhoenixConnection conn1 = DriverManager.getConnection(getUrl(), props).unwrap(PhoenixConnection.class);
    ensureTableCreated(getUrl(), tableName, null, ts);
    
    descriptor = admin.getTableDescriptor(htableName);
    assertEquals(3,descriptor.getColumnFamilies().length);
    HColumnDescriptor cdA = descriptor.getFamily(cfA);
    assertNotEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdA.getKeepDeletedCellsAsEnum());
    assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overridden using WITH
    assertEquals(1, cdA.getMaxVersions()); // Overridden using WITH
    HColumnDescriptor cdB = descriptor.getFamily(cfB);
    // Allow KEEP_DELETED_CELLS to be false for VIEW
    assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdB.getKeepDeletedCellsAsEnum());
    assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the original value.
    // CF c should stay the same since it's not a Phoenix cf.
    HColumnDescriptor cdC = descriptor.getFamily(cfC);
    assertNotNull("Column family not found", cdC);
    assertEquals(HColumnDescriptor.DEFAULT_KEEP_DELETED, cdC.getKeepDeletedCellsAsEnum());
    assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
    assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
    assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
    assertTrue(descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName()));
    admin.close();
     
    int rowCount = 5;
    String upsert = "UPSERT INTO " + tableName + "(id,col1,col2) VALUES(?,?,?)";
    PreparedStatement ps = conn1.prepareStatement(upsert);
    for (int i = 0; i < rowCount; i++) {
        ps.setString(1, Integer.toString(i));
        ps.setInt(2, i+1);
        ps.setInt(3, i+2);
        ps.execute();
    }
    conn1.commit();
    conn1.close();
    
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection conn2 = DriverManager.getConnection(getUrl(), props);
    String query = "SELECT count(1) FROM " + tableName;
    ResultSet rs = conn2.createStatement().executeQuery(query);
    assertTrue(rs.next());
    assertEquals(rowCount, rs.getLong(1));
    
    query = "SELECT id, col1,col2 FROM " + tableName;
    rs = conn2.createStatement().executeQuery(query);
    for (int i = 0; i < rowCount; i++) {
        assertTrue(rs.next());
        assertEquals(Integer.toString(i),rs.getString(1));
        assertEquals(i+1, rs.getInt(2));
        assertEquals(i+2, rs.getInt(3));
    }
    assertFalse(rs.next());
    conn2.close();
}
 
Example 8
Source File: QueryDatabaseMetaDataTest.java    From phoenix with BSD 3-Clause "New" or "Revised" License
@Test
public void testCreateOnExistingTable() throws Exception {
    PhoenixConnection pconn = DriverManager.getConnection(PHOENIX_JDBC_URL, TEST_PROPERTIES).unwrap(PhoenixConnection.class);
    String tableName = MDTEST_NAME;
    String schemaName = MDTEST_SCHEMA_NAME;
    byte[] cfA = Bytes.toBytes(SchemaUtil.normalizeIdentifier("a"));
    byte[] cfB = Bytes.toBytes(SchemaUtil.normalizeIdentifier("b"));
    byte[] cfC = Bytes.toBytes("c");
    byte[][] familyNames = new byte[][] {cfB, cfC};
    byte[] htableName = SchemaUtil.getTableNameAsBytes(schemaName, tableName);
    HBaseAdmin admin = pconn.getQueryServices().getAdmin();
    try {
        admin.disableTable(htableName);
        admin.deleteTable(htableName);
        admin.enableTable(htableName);
    } catch (org.apache.hadoop.hbase.TableNotFoundException e) {
        // Ignore: the table may not exist yet.
    }
    
    HTableDescriptor descriptor = new HTableDescriptor(htableName);
    for (byte[] familyName : familyNames) {
        HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
        descriptor.addFamily(columnDescriptor);
    }
    admin.createTable(descriptor);
        
    long ts = nextTimestamp();
    Properties props = new Properties();
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 5));
    PhoenixConnection conn1 = DriverManager.getConnection(PHOENIX_JDBC_URL, props).unwrap(PhoenixConnection.class);
    ensureTableCreated(getUrl(), tableName, null, ts);
    
    descriptor = admin.getTableDescriptor(htableName);
    assertEquals(3,descriptor.getColumnFamilies().length);
    HColumnDescriptor cdA = descriptor.getFamily(cfA);
    assertTrue(cdA.getKeepDeletedCells());
    assertEquals(DataBlockEncoding.NONE, cdA.getDataBlockEncoding()); // Overridden using WITH
    assertEquals(1, cdA.getMaxVersions()); // Overridden using WITH
    HColumnDescriptor cdB = descriptor.getFamily(cfB);
    assertTrue(cdB.getKeepDeletedCells());
    assertEquals(DataBlockEncoding.NONE, cdB.getDataBlockEncoding()); // Should keep the original value.
    // CF c should stay the same since it's not a Phoenix cf.
    HColumnDescriptor cdC = descriptor.getFamily(cfC);
    assertNotNull("Column family not found", cdC);
    assertFalse(cdC.getKeepDeletedCells());
    assertFalse(SchemaUtil.DEFAULT_DATA_BLOCK_ENCODING == cdC.getDataBlockEncoding());
    assertTrue(descriptor.hasCoprocessor(UngroupedAggregateRegionObserver.class.getName()));
    assertTrue(descriptor.hasCoprocessor(GroupedAggregateRegionObserver.class.getName()));
    assertTrue(descriptor.hasCoprocessor(ServerCachingEndpointImpl.class.getName()));
    admin.close();
     
    int rowCount = 5;
    String upsert = "UPSERT INTO " + tableName + "(id,col1,col2) VALUES(?,?,?)";
    PreparedStatement ps = conn1.prepareStatement(upsert);
    for (int i = 0; i < rowCount; i++) {
        ps.setString(1, Integer.toString(i));
        ps.setInt(2, i+1);
        ps.setInt(3, i+2);
        ps.execute();
    }
    conn1.commit();
    conn1.close();
    
    props.setProperty(PhoenixRuntime.CURRENT_SCN_ATTRIB, Long.toString(ts + 6));
    Connection conn2 = DriverManager.getConnection(PHOENIX_JDBC_URL, props);
    String query = "SELECT count(1) FROM " + tableName;
    ResultSet rs = conn2.createStatement().executeQuery(query);
    assertTrue(rs.next());
    assertEquals(rowCount, rs.getLong(1));
    
    query = "SELECT id, col1,col2 FROM " + tableName;
    rs = conn2.createStatement().executeQuery(query);
    for (int i = 0; i < rowCount; i++) {
        assertTrue(rs.next());
        assertEquals(Integer.toString(i),rs.getString(1));
        assertEquals(i+1, rs.getInt(2));
        assertEquals(i+2, rs.getInt(3));
    }
    assertFalse(rs.next());
    conn2.close();
}