Java Code Examples for org.apache.hadoop.hbase.client.HTable#setWriteBufferSize()

The following examples, drawn from open-source projects, show how to use org.apache.hadoop.hbase.client.HTable#setWriteBufferSize(); the source file and project license are noted above each example.
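HTable#setWriteBufferSize(long) sets the size, in bytes, of the client-side write buffer. Once autoflush is disabled, each Put is queued locally and the batch is shipped to the region servers only when the buffer fills or flushCommits() is called, trading a short window of non-durability for far fewer RPCs. Below is a minimal sketch of the pattern all of the examples share, assuming the pre-1.0 HTable client API; the table, family, and qualifier names are placeholders:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BufferedPutSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, "example_table");  // placeholder table name
        try {
            table.setAutoFlush(false);                     // buffer puts client-side
            table.setWriteBufferSize(8 * 1024 * 1024);     // flush once 8 MB accumulate
            Put put = new Put(Bytes.toBytes("row1"));
            put.add(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
            table.put(put);                                // queued, not yet sent
        } finally {
            table.flushCommits();                          // push anything still buffered
            table.close();
        }
    }
}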
Example 1
Source File: HBaseOutputFormat.java    From alchemy with Apache License 2.0
@Override
public void open(int taskNumber, int numTasks) throws IOException {
    table = new HTable(conf, this.hbaseProperties.getTableName());
    if (this.hbaseProperties.getBufferSize() > 0) {
        // Buffer puts client-side; they are sent in batches once the
        // configured buffer size is reached.
        table.setAutoFlushTo(false);
        table.setWriteBufferSize(this.hbaseProperties.getBufferSize());
    }
}
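Flink calls open() once per task before any records are written, so a matching close() has to flush whatever is still sitting in the buffer. A hypothetical sketch of that counterpart (the alchemy project's actual implementation may differ):

@Override
public void close() throws IOException {
    if (table != null) {
        table.flushCommits(); // push puts still below the configured buffer size
        table.close();
        table = null;
    }
}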
 
Example 2
Source File: HBaseTable.java    From wifi with Apache License 2.0
public static void put(String row, String column, String data)
		throws Exception {
	HTable table = new HTable(cfg, tableName);
	table.setAutoFlush(false);                  // buffer puts client-side
	table.setWriteBufferSize(10 * 1024 * 1024); // flush once 10 MB accumulate
	Put p1 = new Put(Bytes.toBytes(row));
	p1.add(Bytes.toBytes(familyName), Bytes.toBytes(column),
			Bytes.toBytes(data));
	table.put(p1); // queued in the write buffer, not yet sent
	System.out.println("put '" + row + "','" + familyName + ":" + column
			+ "','" + data + "'");
	table.flushCommits(); // the table reference is lost after this call, so flush now
	table.close();
}
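Note that because this method constructs, flushes, and closes its own HTable on every call, the 10 MB write buffer never batches more than a single Put; to benefit from the buffer, share one table instance across many calls and flush once at the end.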
 
Example 3
Source File: Mapper2HbaseDemo.java    From bigdata-tutorial with Apache License 2.0
@Override
protected void setup(Context context) throws IOException,
		InterruptedException {
	super.setup(context);
	conf = HBaseConfiguration.create(context.getConfiguration());
	conf.set("hbase.zookeeper.quorum", "zk1.hadoop,zk2.hadoop,zk3.hadoop");
	conf.set("hbase.zookeeper.property.clientPort", "2181");

	htable = new HTable(conf, "micmiu");
	htable.setAutoFlush(false);                  // buffer puts client-side
	htable.setWriteBufferSize(12 * 1024 * 1024); // flush once 12 MB accumulate
	wal = true;
}
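Because setup() turns autoflush off, the mapper also needs a cleanup() along these lines so the last partial buffer reaches HBase (a hypothetical sketch; the tutorial's actual mapper may differ):

@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
	htable.flushCommits(); // send puts still below the 12 MB threshold
	htable.close();
	super.cleanup(context);
}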
 
Example 4
Source File: HBaseWriter.java    From hiped2 with Apache License 2.0
/**
 * The MapReduce driver - setup and launch the job.
 *
 * @param args the command-line arguments
 * @return the process exit code
 * @throws Exception if something goes wrong
 */
public int run(final String[] args) throws Exception {

  Cli cli = Cli.builder().setArgs(args).addOptions(CliCommonOpts.InputFileOption.values()).build();
  int result = cli.runCmd();

  if (result != 0) {
    return result;
  }

  File inputFile = new File(cli.getArgValueAsString(CliCommonOpts.InputFileOption.INPUT));

  Configuration conf = HBaseConfiguration.create();

  createTableAndColumn(conf, STOCKS_TABLE_NAME,
      STOCK_DETAILS_COLUMN_FAMILY_AS_BYTES);

  HTable htable = new HTable(conf, STOCKS_TABLE_NAME);
  htable.setAutoFlush(false);                  // buffer puts client-side
  htable.setWriteBufferSize(1024 * 1024 * 12); // flush once 12 MB accumulate

  SpecificDatumWriter<Stock> writer =
      new SpecificDatumWriter<Stock>();
  writer.setSchema(Stock.SCHEMA$);

  ByteArrayOutputStream bao = new ByteArrayOutputStream();
  BinaryEncoder encoder =
      EncoderFactory.get().directBinaryEncoder(bao, null);

  for (Stock stock: AvroStockUtils.fromCsvFile(inputFile)) {
    writer.write(stock, encoder);
    encoder.flush();

    byte[] rowkey = Bytes.add(
        Bytes.toBytes(stock.getSymbol().toString()),
        Bytes.toBytes(stock.getDate().toString()));

    byte[] stockAsAvroBytes = bao.toByteArray();

    Put put = new Put(rowkey);
    put.add(STOCK_DETAILS_COLUMN_FAMILY_AS_BYTES,
        STOCK_COLUMN_QUALIFIER_AS_BYTES,
        stockAsAvroBytes);

    htable.put(put);

    bao.reset();
  }

  htable.flushCommits(); // push any puts still sitting in the write buffer
  htable.close();
  System.out.println("done");

  return 0;
}
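This last example shows the full lifecycle: the 12 MB buffer lets the loop batch many small Avro-encoded rows into few RPCs, the reused ByteArrayOutputStream is reset between rows, and the final flushCommits() pushes the tail of the buffer before the table is closed.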