org.apache.hadoop.hbase.client.BufferedMutatorParams Java Examples

The following examples show how to use org.apache.hadoop.hbase.client.BufferedMutatorParams. Each example is drawn from the open-source project named in its header.
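Before the project examples, here is a minimal, self-contained sketch of the typical BufferedMutatorParams lifecycle that the examples below illustrate: configure a write buffer size and an exception listener, obtain a BufferedMutator from a Connection, buffer a mutation, and flush. The table name "example_table", the column family "cf", and the 4 MB buffer size are placeholder assumptions, not values required by the API.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.BufferedMutatorParams;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class BufferedMutatorParamsSketch {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();

        // The listener is invoked asynchronously when buffered mutations fail.
        BufferedMutator.ExceptionListener listener = (ex, m) -> {
            for (int i = 0; i < ex.getNumExceptions(); i++) {
                System.err.println("Failed to send put: " + ex.getRow(i));
            }
        };

        // Placeholder table name and buffer size; adjust for your cluster.
        BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf("example_table"))
                .writeBufferSize(4 * 1024 * 1024)  // flush automatically once ~4 MB of mutations are buffered
                .listener(listener);

        try (Connection connection = ConnectionFactory.createConnection(conf);
             BufferedMutator mutator = connection.getBufferedMutator(params)) {
            Put put = new Put(Bytes.toBytes("row-1"));
            put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
            mutator.mutate(put);   // buffered locally, not sent immediately
            mutator.flush();       // force outstanding mutations to the cluster
        }
    }
}

Without an explicit flush, buffered mutations are sent when the write buffer fills or when the mutator is closed.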
Example #1
Source File: HBaseIndexDirectMapperBase.java    From hgraphdb with Apache License 2.0
@Override
protected void setup(final Context context) throws IOException, InterruptedException {
    super.setup(context);

    final Configuration configuration = context.getConfiguration();

    skipWAL = configuration.getBoolean(Constants.MAPREDUCE_INDEX_SKIP_WAL, false);

    TableName outputTable = TableName.valueOf(configuration.get(TableOutputFormat.OUTPUT_TABLE));
    BufferedMutator.ExceptionListener listener = (e, mutator) -> {
        for (int i = 0; i < e.getNumExceptions(); i++) {
            LOG.warn("Failed to send put: " + e.getRow(i));
        }
    };
    BufferedMutatorParams mutatorParams = new BufferedMutatorParams(outputTable).listener(listener);
    mutator = getGraph().connection().getBufferedMutator(mutatorParams);
}
 
Example #2
Source File: IntegrationTestBigLinkedListWithVisibility.java    From hbase with Apache License 2.0
@Override
protected void instantiateHTable() throws IOException {
  for (int i = 0; i < DEFAULT_TABLES_COUNT; i++) {
    BufferedMutatorParams params = new BufferedMutatorParams(getTableName(i));
    params.writeBufferSize(4 * 1024 * 1024);
    BufferedMutator table = connection.getBufferedMutator(params);
    this.tables[i] = table;
  }
}
 
Example #3
Source File: IntegrationTestLoadAndVerify.java    From hbase with Apache License 2.0
@Override
public void setup(Context context) throws IOException {
  conf = context.getConfiguration();
  recordsToWrite = conf.getLong(NUM_TO_WRITE_KEY, NUM_TO_WRITE_DEFAULT);
  String tableName = conf.get(TABLE_NAME_KEY, TABLE_NAME_DEFAULT);
  numBackReferencesPerRow = conf.getInt(NUM_BACKREFS_KEY, NUM_BACKREFS_DEFAULT);
  this.connection = ConnectionFactory.createConnection(conf);
  mutator = connection.getBufferedMutator(
      new BufferedMutatorParams(TableName.valueOf(tableName))
          .writeBufferSize(4 * 1024 * 1024));

  String taskId = conf.get("mapreduce.task.attempt.id");
  Matcher matcher = Pattern.compile(".+_m_(\\d+_\\d+)").matcher(taskId);
  if (!matcher.matches()) {
    throw new RuntimeException("Strange task ID: " + taskId);
  }
  shortTaskId = matcher.group(1);

  rowsWritten = context.getCounter(Counters.ROWS_WRITTEN);
  refsWritten = context.getCounter(Counters.REFERENCES_WRITTEN);
}
 
Example #4
Source File: HBaseBulkLoader.java    From hgraphdb with Apache License 2.0
private static BufferedMutator getBufferedMutator(HBaseGraph graph, String tableName) {
    try {
        HBaseGraphConfiguration config = graph.configuration();
        TableName name = HBaseGraphUtils.getTableName(config, tableName);
        BufferedMutatorParams params = new BufferedMutatorParams(name).listener(LISTENER);
        return graph.connection().getBufferedMutator(params);
    } catch (IOException e) {
        throw new HBaseGraphException(e);
    }
}
 
Example #5
Source File: PerformanceEvaluation.java    From hbase with Apache License 2.0
@Override
void onStartup() throws IOException {
  BufferedMutatorParams p = new BufferedMutatorParams(TableName.valueOf(opts.tableName));
  p.writeBufferSize(opts.bufferSize);
  this.mutator = connection.getBufferedMutator(p);
  this.table = connection.getTable(TableName.valueOf(opts.tableName));
}
 
Example #6
Source File: HBase10Table.java    From pentaho-hadoop-shims with Apache License 2.0
@Override
public void setWriteBufferSize( long bufferSize ) throws IOException {
  try {
    tab.getClass().getMethod( "setWriteBufferSize", long.class ).invoke( tab, bufferSize );
  } catch ( NoSuchMethodException | IllegalAccessException | InvocationTargetException e ) {
    mutator = conn.getBufferedMutator( new BufferedMutatorParams( tab.getName() ).writeBufferSize( bufferSize ) );
  }
}
 
Example #7
Source File: HBaseUpsertSinkFunction.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	LOG.info("start open ...");
	org.apache.hadoop.conf.Configuration config = prepareRuntimeConfiguration();
	try {
		this.helper = new HBaseReadWriteHelper(schema);
		this.numPendingRequests = new AtomicLong(0);

		if (null == connection) {
			this.connection = ConnectionFactory.createConnection(config);
		}
		// create a parameter instance, set the table name and custom listener reference.
		BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(hTableName))
			.listener(this)
			.writeBufferSize(bufferFlushMaxSizeInBytes);
		this.mutator = connection.getBufferedMutator(params);

		if (bufferFlushIntervalMillis > 0) {
			this.executor = Executors.newScheduledThreadPool(
				1, new ExecutorThreadFactory("hbase-upsert-sink-flusher"));
			this.scheduledFuture = this.executor.scheduleWithFixedDelay(() -> {
				if (closed) {
					return;
				}
				try {
					flush();
				} catch (Exception e) {
					// fail the sink and skip the rest of the items
					// if the failure handler decides to throw an exception
					failureThrowable.compareAndSet(null, e);
				}
			}, bufferFlushIntervalMillis, bufferFlushIntervalMillis, TimeUnit.MILLISECONDS);
		}
	} catch (TableNotFoundException tnfe) {
		LOG.error("The table " + hTableName + " not found ", tnfe);
		throw new RuntimeException("HBase table '" + hTableName + "' not found.", tnfe);
	} catch (IOException ioe) {
		LOG.error("Exception while creating connection to HBase.", ioe);
		throw new RuntimeException("Cannot create connection to HBase.", ioe);
	}
	LOG.info("end open.");
}
 
Example #8
Source File: HBaseDataStore.java    From uavstack with Apache License 2.0
/**
 * The msg contains:
 *
 * @param tablename
 * @param entity:
 *            rowkey->cf:column->value, with additional handling of the _timestamp field
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
protected boolean insert(DataStoreMsg msg) {

    // validity check based on the table name
    Map[] maps = (Map[]) adaptor.prepareInsertObj(msg, datasource.getDataStoreConnection());
    Map<byte[], Map> entity = maps[0];
    Map<byte[], Long> entityStamp = maps[1];
    String tableName = (String) msg.get(DataStoreProtocol.HBASE_TABLE_NAME);
    // add write buffer
    BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(tableName));

    params.writeBufferSize(1024 * 1024 * 2);
    try (BufferedMutator table = datasource.getSourceConnect().getBufferedMutator(params);) {

        // collect all column families
        List<Put> puts = Lists.newArrayList();
        Put put = null;
        for (byte[] rowkey : entity.keySet()) {
            // apply the custom timestamp if one is provided
            put = entityStamp.containsKey(rowkey) ? new Put(rowkey, entityStamp.get(rowkey)) : new Put(rowkey);

            // extract the column and value
            for (Object entry : entity.get(rowkey).keySet()) {

                String[] column = ((String) entry).split(":");
                put.addColumn(Bytes.toBytes(column[0]), Bytes.toBytes(column[1]),
                        Bytes.toBytes((String) entity.get(rowkey).get(entry)));
            }
            puts.add(put);
        }
        // submit in batch
        Object[] results = new Object[puts.size()];
        // table.batch(puts, results);
        table.mutate(puts);
        // flush
        table.flush();
        // handle the insert result and return the outcome
        return adaptor.handleInsertResult(results, msg, datasource.getDataStoreConnection());
    } catch (IOException e) {
        log.err(this, "INSERT HBASE TABLE[" + tableName + "] FAIL:" + msg.toJSONString(), e);
        return false;
    }

}
 
Example #9
Source File: HBaseIO.java    From beam with Apache License 2.0
@StartBundle
public void startBundle(StartBundleContext c) throws IOException {
  BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(tableId));
  mutator = connection.getBufferedMutator(params);
  recordsWritten = 0;
}
 
Example #10
Source File: HBaseSinkFunction.java    From flink with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
	LOG.info("start open ...");
	org.apache.hadoop.conf.Configuration config = prepareRuntimeConfiguration();
	try {
		this.mutationConverter.open();
		this.numPendingRequests = new AtomicLong(0);

		if (null == connection) {
			this.connection = ConnectionFactory.createConnection(config);
		}
		// create a parameter instance, set the table name and custom listener reference.
		BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf(hTableName))
			.listener(this);
		if (bufferFlushMaxSizeInBytes > 0) {
			params.writeBufferSize(bufferFlushMaxSizeInBytes);
		}
		this.mutator = connection.getBufferedMutator(params);

		if (bufferFlushIntervalMillis > 0) {
			this.executor = Executors.newScheduledThreadPool(
				1, new ExecutorThreadFactory("hbase-upsert-sink-flusher"));
			this.scheduledFuture = this.executor.scheduleWithFixedDelay(() -> {
				if (closed) {
					return;
				}
				try {
					flush();
				} catch (Exception e) {
					// fail the sink and skip the rest of the items
					// if the failure handler decides to throw an exception
					failureThrowable.compareAndSet(null, e);
				}
			}, bufferFlushIntervalMillis, bufferFlushIntervalMillis, TimeUnit.MILLISECONDS);
		}
	} catch (TableNotFoundException tnfe) {
		LOG.error("The table " + hTableName + " not found ", tnfe);
		throw new RuntimeException("HBase table '" + hTableName + "' not found.", tnfe);
	} catch (IOException ioe) {
		LOG.error("Exception while creating connection to HBase.", ioe);
		throw new RuntimeException("Cannot create connection to HBase.", ioe);
	}
	LOG.info("end open.");
}
 
Example #11
Source File: ThriftConnection.java    From hbase with Apache License 2.0
@Override
public BufferedMutator getBufferedMutator(BufferedMutatorParams params) throws IOException {
  throw new NotImplementedException("getBufferedMutator not supported in ThriftConnection");
}
 
Example #12
Source File: TestMultiTableInputFormatBase.java    From hbase with Apache License 2.0
@Override
public BufferedMutator getBufferedMutator(BufferedMutatorParams params) throws IOException {
  return null;
}
 
Example #13
Source File: TestTableInputFormatBase.java    From hbase with Apache License 2.0
@Override
public BufferedMutator getBufferedMutator(BufferedMutatorParams params) throws IOException {
  throw new UnsupportedOperationException();
}
 
Example #14
Source File: IntegrationTestBigLinkedList.java    From hbase with Apache License 2.0
protected void instantiateHTable() throws IOException {
  mutator = connection.getBufferedMutator(
      new BufferedMutatorParams(getTableName(connection.getConfiguration()))
          .writeBufferSize(4 * 1024 * 1024));
}
 
Example #15
Source File: HBaseOperations.java    From geowave with Apache License 2.0
public BufferedMutator getBufferedMutator(final TableName tableName) throws IOException {
  final BufferedMutatorParams params = new BufferedMutatorParams(tableName);

  return conn.getBufferedMutator(params);
}