Java Code Examples for org.apache.hadoop.hbase.client.HTableInterface#batch()

The following examples show how to use org.apache.hadoop.hbase.client.HTableInterface#batch(). Each example is drawn from an open-source project; the source file and license are noted above each snippet.
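Before the project-sourced examples, here is a minimal, self-contained sketch of the general call pattern, assuming a classic (pre-1.0) HBase client. The table name "example_table", the column family "cf", and the qualifiers below are hypothetical and exist only for illustration; the two-argument batch(actions, results) form is used so the per-action results remain available even if some actions fail.

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HTableInterface;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.util.Bytes;

public class HTableBatchSketch {
    public static void main(String[] args) throws IOException, InterruptedException {
        Configuration conf = HBaseConfiguration.create();
        // HTable implements HTableInterface; "example_table" is a made-up table name.
        HTableInterface table = new HTable(conf, "example_table");
        try {
            // batch() accepts any mix of Row subclasses: Put, Delete, Get, Increment, ...
            List<Row> actions = new ArrayList<Row>();

            Put put = new Put(Bytes.toBytes("row1"));
            put.add(Bytes.toBytes("cf"), Bytes.toBytes("q1"), Bytes.toBytes("value"));
            actions.add(put);

            Increment increment = new Increment(Bytes.toBytes("row1"));
            increment.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("counter"), 1L);
            actions.add(increment);

            Delete delete = new Delete(Bytes.toBytes("row2"));
            actions.add(delete);

            // The results array is filled in the same order as the actions list,
            // even when some of the actions fail.
            Object[] results = new Object[actions.size()];
            table.batch(actions, results);
        } finally {
            table.close();
        }
    }
}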
Example 1
Source File: UngroupedAggregateRegionObserver.java    From phoenix with Apache License 2.0
private void commitIndexMutations(final ObserverContext<RegionCoprocessorEnvironment> c,
        HRegion region, List<Mutation> indexMutations) throws IOException {
    // Get indexRegion corresponding to data region
    HRegion indexRegion = IndexUtil.getIndexRegion(c.getEnvironment());
    if (indexRegion != null) {
        commitBatch(indexRegion, indexMutations, null);
    } else {
        TableName indexTable =
                TableName.valueOf(MetaDataUtil.getLocalIndexPhysicalName(region.getTableDesc()
                        .getName()));
        HTableInterface table = null;
        try {
            table = c.getEnvironment().getTable(indexTable);
            table.batch(indexMutations);
        } catch (InterruptedException ie) {
            ServerUtil.throwIOException(c.getEnvironment().getRegion().getRegionNameAsString(),
                ie);
        } finally {
            if (table != null) table.close();
        }
    }
    indexMutations.clear();
}
 
Example 2
Source File: BasicFraudHBaseService.java    From hadoop-arch-book with Apache License 2.0
public void updateProfileCountsForSaleInHBase(Long buyerId, Long sellerId, ItemSaleEvent event) throws IOException, InterruptedException {
  HTableInterface profileTable = hTablePool.getTable(DataModelConsts.PROFILE_TABLE);
  ArrayList<Row> actions = new ArrayList<Row>();
  
  // Credit the buyer: bump the current-session purchase value and the lifetime purchase total.
  Increment buyerValueIncrement = new Increment(generateProfileRowKey(buyerId));
  buyerValueIncrement.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_PURCHASES_VALUE_COL, event.getItemValue());
  buyerValueIncrement.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_VALUE_OF_PAST_PURCHASES_COL, event.getItemValue());
  actions.add(buyerValueIncrement);
  
  // Credit the seller: bump the current-session sale value and the lifetime sale total.
  Increment sellerValueIncrement = new Increment(generateProfileRowKey(sellerId));
  sellerValueIncrement.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_SELLS_VALUE_COL, event.getItemValue());
  sellerValueIncrement.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_VALUE_OF_PAST_SELLS_COL, event.getItemValue());
  actions.add(sellerValueIncrement);
  
  profileTable.batch(actions);
  
}
 
Example 3
Source File: BasicFraudHBaseService.java    From hadoop-arch-book with Apache License 2.0
public void logInProfileInHBase(long userId, String ipAddress) throws IOException, Exception {
  HTableInterface profileTable = hTablePool.getTable(DataModelConsts.PROFILE_TABLE);
  
  ArrayList<Row> actions = new ArrayList<Row>();
  
  byte[] profileRowKey = generateProfileRowKey(userId);

  Delete delete = new Delete(profileRowKey);
  delete.deleteColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_PURCHASES_VALUE_COL);
  delete.deleteColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_SELLS_VALUE_COL);
  actions.add(delete);
  
  Increment increment = new Increment(profileRowKey);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LOG_IN_COUNT_COL, 1);
  actions.add(increment);
  
  Put put = new Put(profileRowKey);
  put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LAST_LOG_IN_COL, Bytes.toBytes(System.currentTimeMillis()));
  put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LOG_IN_IP_ADDERSSES, Bytes.toBytes(ipAddress));
  actions.add(put);
  
  profileTable.batch(actions);
}
 
Example 4
Source File: BasicFraudHBaseService.java    From hadoop-arch-book with Apache License 2.0
@Override
public void createProfile(long userId, ProfilePojo pojo, String ipAddress) throws Exception {
  HTableInterface profileTable = hTablePool.getTable(DataModelConsts.PROFILE_TABLE);
  
  ArrayList<Row> actions = new ArrayList<Row>();
  
  byte[] rowKey = generateProfileRowKey(userId);
  Put put = new Put(rowKey);
  put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.FIXED_INFO_COL, Bytes.toBytes(pojo.getUsername() + "|" + pojo.getAge() + "|" + System.currentTimeMillis()));
  put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LOG_IN_IP_ADDERSSES, Bytes.toBytes(ipAddress));
  put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LAST_LOG_IN_COL, Bytes.toBytes(System.currentTimeMillis()));
  actions.add(put);
  
  Increment increment = new Increment(rowKey);
  
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LOG_IN_COUNT_COL, 1);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_SELLS_COL, 0);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_PURCHASES_COL, 0);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_VALUE_OF_PAST_PURCHASES_COL, 0);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_VALUE_OF_PAST_SELLS_COL, 0);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_SELLS_VALUE_COL, 0);
  increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_PURCHASES_VALUE_COL, 0);
  actions.add(increment);
  
  profileTable.batch(actions);
}
 
Example 5
Source File: DynamicColumnIT.java    From phoenix with Apache License 2.0
@SuppressWarnings("deprecation")
private static void initTableValues() throws Exception {
    ConnectionQueryServices services = driver.getConnectionQueryServices(getUrl(), PropertiesUtil.deepCopy(TEST_PROPERTIES));
    HTableInterface hTable = services.getTable(SchemaUtil.getTableNameAsBytes(HBASE_DYNAMIC_COLUMNS_SCHEMA_NAME,HBASE_DYNAMIC_COLUMNS));
    try {
        // Insert rows using standard HBase mechanism with standard HBase "types"
        List<Row> mutations = new ArrayList<Row>();
        byte[] dv = Bytes.toBytes("DV");
        byte[] first = Bytes.toBytes("F");
        byte[] f1v1 = Bytes.toBytes("F1V1");
        byte[] f1v2 = Bytes.toBytes("F1V2");
        byte[] f2v1 = Bytes.toBytes("F2V1");
        byte[] f2v2 = Bytes.toBytes("F2V2");
        byte[] key = Bytes.toBytes("entry1");

        Put put = new Put(key);
        put.add(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, dv, Bytes.toBytes("default"));
        put.add(QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES, first, Bytes.toBytes("first"));
        put.add(FAMILY_NAME, f1v1, Bytes.toBytes("f1value1"));
        put.add(FAMILY_NAME, f1v2, Bytes.toBytes("f1value2"));
        put.add(FAMILY_NAME2, f2v1, Bytes.toBytes("f2value1"));
        put.add(FAMILY_NAME2, f2v2, Bytes.toBytes("f2value2"));
        mutations.add(put);

        hTable.batch(mutations);

    } finally {
        hTable.close();
    }
    // Create Phoenix table after HBase table was created through the native APIs
    // The timestamp of the table creation must be later than the timestamp of the data
    ensureTableCreated(getUrl(), HBASE_DYNAMIC_COLUMNS);
}
 
Example 6
Source File: BasicFraudHBaseService.java    From hadoop-arch-book with Apache License 2.0
@Override
public void createBulkProfile(ArrayList<ProfileCreatePojo> pojoList)
    throws Exception {
  
  HTableInterface profileTable = hTablePool.getTable(DataModelConsts.PROFILE_TABLE);
  ArrayList<Row> actions = new ArrayList<Row>();
  
  for (ProfileCreatePojo pojo: pojoList) {
    
    byte[] rowKey = generateProfileRowKey(pojo.getUserId());
    Put put = new Put(rowKey);
    put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.FIXED_INFO_COL, Bytes.toBytes(pojo.getPojo().getUsername() + "|" + pojo.getPojo().getAge() + "|" + System.currentTimeMillis()));
    put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LOG_IN_IP_ADDERSSES, Bytes.toBytes(pojo.getIpAddress()));
    put.add(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LAST_LOG_IN_COL, Bytes.toBytes(System.currentTimeMillis()));
    actions.add(put);
    
    Increment increment = new Increment(rowKey);
    
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.LOG_IN_COUNT_COL, 1);
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_SELLS_COL, 0);
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_PURCHASES_COL, 0);
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_VALUE_OF_PAST_PURCHASES_COL, 0);
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.TOTAL_VALUE_OF_PAST_SELLS_COL, 0);
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_SELLS_VALUE_COL, 0);
    increment.addColumn(DataModelConsts.PROFILE_COLUMN_FAMILY, DataModelConsts.CURRENT_LOG_IN_PURCHASES_VALUE_COL, 0);
    actions.add(increment);
  }

  profileTable.batch(actions);
}
 
Example 7
Source File: DynamicColumnTest.java    From phoenix with BSD 3-Clause "New" or "Revised" License
private static void initTableValues() throws Exception {
    ConnectionQueryServices services = driver.getConnectionQueryServices(getUrl(), TEST_PROPERTIES);
    HTableInterface hTable = services.getTable(SchemaUtil.getTableNameAsBytes(HBASE_DYNAMIC_COLUMNS_SCHEMA_NAME,HBASE_DYNAMIC_COLUMNS));
    try {
        // Insert rows using standard HBase mechanism with standard HBase "types"
        List<Row> mutations = new ArrayList<Row>();
        byte[] dv = Bytes.toBytes("DV");
        byte[] first = Bytes.toBytes("F");
        byte[] f1v1 = Bytes.toBytes("F1V1");
        byte[] f1v2 = Bytes.toBytes("F1V2");
        byte[] f2v1 = Bytes.toBytes("F2V1");
        byte[] f2v2 = Bytes.toBytes("F2V2");
        byte[] key = Bytes.toBytes("entry1");

        Put put = new Put(key);
        put.add(QueryConstants.EMPTY_COLUMN_BYTES, dv, Bytes.toBytes("default"));
        put.add(QueryConstants.EMPTY_COLUMN_BYTES, first, Bytes.toBytes("first"));
        put.add(FAMILY_NAME, f1v1, Bytes.toBytes("f1value1"));
        put.add(FAMILY_NAME, f1v2, Bytes.toBytes("f1value2"));
        put.add(FAMILY_NAME2, f2v1, Bytes.toBytes("f2value1"));
        put.add(FAMILY_NAME2, f2v2, Bytes.toBytes("f2value2"));
        mutations.add(put);

        hTable.batch(mutations);

    } finally {
        hTable.close();
    }
    // Create Phoenix table after HBase table was created through the native APIs
    // The timestamp of the table creation must be later than the timestamp of the data
    ensureTableCreated(getUrl(), HBASE_DYNAMIC_COLUMNS);
}