org.apache.hadoop.hbase.protobuf.ProtobufUtil Java Examples

The following examples show how to use org.apache.hadoop.hbase.protobuf.ProtobufUtil. They are drawn from several open source projects; the source file, originating project, and license are listed above each example.
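Before the project examples, here is a minimal, self-contained sketch of the most common round trip ProtobufUtil performs: converting a client-side Scan to its protobuf form (ClientProtos.Scan) and back. The class name and row keys are illustrative only and do not come from any of the projects below.

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;
import org.apache.hadoop.hbase.util.Bytes;

public class ProtobufUtilRoundTrip {
    public static void main(String[] args) throws Exception {
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("row-000")); // illustrative row keys
        scan.setStopRow(Bytes.toBytes("row-999"));

        // Client-side Scan -> protobuf message -> raw bytes (e.g. for a job configuration or an RPC).
        ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
        byte[] wire = proto.toByteArray();

        // Raw bytes -> protobuf message -> client-side Scan.
        Scan restored = ProtobufUtil.toScan(ClientProtos.Scan.parseFrom(wire));
        System.out.println("restored start row: " + Bytes.toString(restored.getStartRow()));
    }
}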
Example #1
Source File: StatisticsWriter.java    From phoenix with Apache License 2.0
public void commitStats(final List<Mutation> mutations, final StatisticsCollector statsCollector)
        throws IOException {
    User.runAsLoginUser(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
            commitLastStatsUpdatedTime(statsCollector);
            if (mutations.size() > 0) {
                byte[] row = mutations.get(0).getRow();
                MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
                for (Mutation m : mutations) {
                    mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(getMutationType(m), m));
                }
                MutateRowsRequest mrm = mrmBuilder.build();
                CoprocessorRpcChannel channel = statsWriterTable.coprocessorService(row);
                MultiRowMutationService.BlockingInterface service = MultiRowMutationService
                        .newBlockingStub(channel);
                try {
                    service.mutateRows(null, mrm);
                } catch (ServiceException ex) {
                    // Convert the RPC-layer ServiceException back to an IOException and rethrow it.
                    throw ProtobufUtil.toIOException(ex);
                }
            }
            return null;
        }
    });
}
 
Example #2
Source File: CubeSegmentTupleIterator.java    From Kylin with Apache License 2.0
private void closeScanner() {
    if (logger.isDebugEnabled() && scan != null) {
        logger.debug("Scan " + scan.toString());
        byte[] metricsBytes = scan.getAttribute(Scan.SCAN_ATTRIBUTES_METRICS_DATA);
        if (metricsBytes != null) {
            ScanMetrics scanMetrics = ProtobufUtil.toScanMetrics(metricsBytes);
            logger.debug("HBase Metrics: " + "count={}, ms={}, bytes={}, remote_bytes={}, regions={}, not_serving_region={}, rpc={}, rpc_retries={}, remote_rpc={}, remote_rpc_retries={}", new Object[] { scanCount, scanMetrics.sumOfMillisSecBetweenNexts, scanMetrics.countOfBytesInResults, scanMetrics.countOfBytesInRemoteResults, scanMetrics.countOfRegions, scanMetrics.countOfNSRE, scanMetrics.countOfRPCcalls, scanMetrics.countOfRPCRetries, scanMetrics.countOfRemoteRPCcalls, scanMetrics.countOfRemoteRPCRetries });
        }
    }
    try {
        if (scanner != null) {
            scanner.close();
            scanner = null;
        }
    } catch (Throwable t) {
        throw new StorageException("Error when closing scanner for table " + tableName, t);
    }
}
 
Example #3
Source File: StatisticsWriter.java    From phoenix with Apache License 2.0
public void commitStats(List<Mutation> mutations) throws IOException {
    if (mutations.size() > 0) {
        byte[] row = mutations.get(0).getRow();
        MutateRowsRequest.Builder mrmBuilder = MutateRowsRequest.newBuilder();
        for (Mutation m : mutations) {
            mrmBuilder.addMutationRequest(ProtobufUtil.toMutation(getMutationType(m), m));
        }
        MutateRowsRequest mrm = mrmBuilder.build();
        CoprocessorRpcChannel channel = statsWriterTable.coprocessorService(row);
        MultiRowMutationService.BlockingInterface service =
                MultiRowMutationService.newBlockingStub(channel);
        try {
          service.mutateRows(null, mrm);
        } catch (ServiceException ex) {
          // Convert the RPC-layer ServiceException back to an IOException and rethrow it.
          throw ProtobufUtil.toIOException(ex);
        }
    }
}
 
Example #4
Source File: HBase.java    From pxf with Apache License 2.0
private void grantPermissions(Table table,
                              String user, Action... actions)
        throws Exception {

    ReportUtils.report(report, getClass(), config.toString());
    ReportUtils.report(report, getClass(), "grant request for user=" + user + " table=" + table);
    String hbaseAuthEnabled = config.get("hbase.security.authorization");
    if (!isAuthorizationEnabled && (hbaseAuthEnabled == null || !hbaseAuthEnabled.equals("true"))) {
        ReportUtils.report(report, getClass(),
                "HBase security authorization is not enabled, cannot grant permissions");
        return;
    }

    org.apache.hadoop.hbase.client.Table acl = connection.getTable(AccessControlLists.ACL_TABLE_NAME);
    try {
        BlockingRpcChannel service = acl.coprocessorService(HConstants.EMPTY_START_ROW);
        AccessControlProtos.AccessControlService.BlockingInterface protocol = AccessControlProtos.AccessControlService.newBlockingStub(service);
        if (table == null) {
            ProtobufUtil.grant(protocol, user, actions);
        } else {
            ProtobufUtil.grant(protocol, user, TableName.valueOf(table.getName()), null, null, actions);
        }
    } finally {
        acl.close();
    }
}
 
Example #5
Source File: PhoenixInputSplit.java    From phoenix with Apache License 2.0
@Override
public void write(DataOutput output) throws IOException {
    Preconditions.checkNotNull(scans);
    WritableUtils.writeVInt(output, scans.size());
    for (Scan scan : scans) {
        ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
        byte[] protoScanBytes = protoScan.toByteArray();
        WritableUtils.writeVInt(output, protoScanBytes.length);
        output.write(protoScanBytes);
    }
}
 
Example #6
Source File: TableScannerBuilderTest.java    From spliceengine with GNU Affero General Public License v3.0
@Override
protected DataScan readScan(ObjectInput in) throws IOException{
    byte[] bytes = new byte[in.readInt()];
    in.readFully(bytes);
    ClientProtos.Scan scan=ClientProtos.Scan.parseFrom(bytes);
    return new HScan(ProtobufUtil.toScan(scan));
}
 
Example #7
Source File: TableScannerBuilderTest.java    From spliceengine with GNU Affero General Public License v3.0
@Override
protected void writeScan(ObjectOutput out) throws IOException{
    Scan scan=((HScan)this.scan).unwrapDelegate();
    byte[] bytes =ProtobufUtil.toScan(scan).toByteArray();
    out.writeInt(bytes.length);
    out.write(bytes);
}
 
Example #8
Source File: SpliceTableMapReduceUtil.java    From spliceengine with GNU Affero General Public License v3.0
/**
 * Writes the given scan into a Base64 encoded string.
 *
 * @param scan  The scan to write out.
 * @return The scan saved in a Base64 encoded string.
 * @throws IOException When writing the scan fails.
 */
public static String convertScanToString(Scan scan) throws IOException{
    byte[] bytes=ProtobufUtil.toScan(scan).toByteArray();
    return Base64.getEncoder().encodeToString(bytes);
}
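The helper above is typically paired with convertStringToScan (Example #28 below) to move a Scan through a job configuration. The following is a minimal usage sketch, assuming the two SpliceTableMapReduceUtil methods shown in Examples #8 and #28; the wrapper class name is illustrative and the import for SpliceTableMapReduceUtil is omitted because its package depends on the spliceengine version.

import java.io.IOException;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanStringRoundTrip {
    public static void main(String[] args) throws IOException {
        Scan scan = new Scan();
        scan.setStartRow(Bytes.toBytes("row-a")); // illustrative start row

        // Serialize the scan for storage in a job configuration (Example #8) ...
        String encoded = SpliceTableMapReduceUtil.convertScanToString(scan);
        // ... and restore it on the worker side (Example #28).
        Scan restored = SpliceTableMapReduceUtil.convertStringToScan(encoded);
        System.out.println("start row survives the round trip: " + Bytes.toString(restored.getStartRow()));
    }
}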
 
Example #9
Source File: BulkWriteChannelInvoker.java    From spliceengine with GNU Affero General Public License v3.0
public BulkWritesResult invoke(BulkWrites write) throws IOException {
    TableName tableName=tableInfoFactory.getTableInfo(this.tableName);
    CoprocessorRpcChannel channel = channelFactory.newChannel(tableName,write.getRegionKey());

    boolean cacheCheck = false;
    try {
        SpliceMessage.SpliceIndexService service = ProtobufUtil.newServiceStub(SpliceMessage.SpliceIndexService.class, channel);
        SpliceMessage.BulkWriteRequest.Builder builder = SpliceMessage.BulkWriteRequest.newBuilder();
        byte[] requestBytes = compressor.compress(write);
        builder.setBytes(ZeroCopyLiteralByteString.wrap(requestBytes));
        SpliceMessage.BulkWriteRequest bwr = builder.build();

        BlockingRpcCallback<SpliceMessage.BulkWriteResponse> doneCallback =new BlockingRpcCallback<>();
        ServerRpcController controller = new ServerRpcController();
        service.bulkWrite(controller, bwr, doneCallback);
        if (controller.failed()){
            IOException error=controller.getFailedOn();
            clearCacheIfNeeded(error);
            cacheCheck=true;
            if(error!=null)
                throw pef.processRemoteException(error);
            else
                throw pef.fromErrorString(controller.errorText());
        }
        SpliceMessage.BulkWriteResponse bulkWriteResponse = doneCallback.get();
        byte[] bytes = bulkWriteResponse.getBytes().toByteArray();
        if(bytes==null || bytes.length<=0){
            Logger logger=Logger.getLogger(BulkWriteChannelInvoker.class);
            logger.error("zero-length bytes returned with a null error for encodedString: "+write.getBulkWrites().iterator().next().getEncodedStringName());
        }

        return compressor.decompress(bytes,BulkWritesResult.class);
    } catch (Exception e) {
        if (!cacheCheck) clearCacheIfNeeded(e);
        throw pef.processRemoteException(e);
    }
}
 
Example #10
Source File: HOperationFactory.java    From spliceengine with GNU Affero General Public License v3.0
@Override
public DataScan readScan(ObjectInput in) throws IOException{
    byte[] bytes = new byte[in.readInt()];
    in.readFully(bytes);
    ClientProtos.Scan scan=ClientProtos.Scan.parseFrom(bytes);
    return new HScan(ProtobufUtil.toScan(scan));
}
 
Example #11
Source File: HOperationFactory.java    From spliceengine with GNU Affero General Public License v3.0
@Override
public void writeScan(DataScan scan,ObjectOutput out) throws IOException{
    Scan delegate=((HScan)scan).unwrapDelegate();

    byte[] bytes=ProtobufUtil.toScan(delegate).toByteArray();
    out.writeInt(bytes.length);
    out.write(bytes);
}
 
Example #12
Source File: HBaseTxnNetworkLayer.java    From spliceengine with GNU Affero General Public License v3.0
@Override
protected TxnMessage.TxnLifecycleService getLifecycleService(byte[] rowKey) throws IOException{
    TxnMessage.TxnLifecycleService service;
    CoprocessorRpcChannel coprocessorRpcChannel = channelFactory.newRetryableChannel(table.getName(), rowKey);
    try{
        service=ProtobufUtil.newServiceStub(TxnMessage.TxnLifecycleService.class,coprocessorRpcChannel);
    }catch(Exception e){
        throw new IOException(e);
    }
    return service;
}
 
Example #13
Source File: PhoenixInputSplit.java    From phoenix with Apache License 2.0
@Override
public void write(DataOutput output) throws IOException {
    WritableUtils.writeString(output, regionLocation);
    WritableUtils.writeVLong(output, regionSize);

    Preconditions.checkNotNull(scans);
    WritableUtils.writeVInt(output, scans.size());
    for (Scan scan : scans) {
        ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
        byte[] protoScanBytes = protoScan.toByteArray();
        WritableUtils.writeVInt(output, protoScanBytes.length);
        output.write(protoScanBytes);
    }
}
 
Example #14
Source File: PhoenixInputSplit.java    From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
    regionLocation = WritableUtils.readString(input);
    regionSize = WritableUtils.readVLong(input);
    int count = WritableUtils.readVInt(input);
    scans = Lists.newArrayListWithExpectedSize(count);
    for (int i = 0; i < count; i++) {
        byte[] protoScanBytes = new byte[WritableUtils.readVInt(input)];
        input.readFully(protoScanBytes);
        ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
        Scan scan = ProtobufUtil.toScan(protoScan);
        scans.add(scan);
    }
    init();
}
 
Example #15
Source File: PhoenixInputSplit.java    From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
    int count = WritableUtils.readVInt(input);
    scans = Lists.newArrayListWithExpectedSize(count);
    for (int i = 0; i < count; i++) {
        byte[] protoScanBytes = new byte[WritableUtils.readVInt(input)];
        input.readFully(protoScanBytes);
        ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
        Scan scan = ProtobufUtil.toScan(protoScan);
        scans.add(scan);
    }
    init();
}
 
Example #16
Source File: ProtoBufConverter.java    From eagle with Apache License 2.0
public static ClientProtos.Scan toPBScan(Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example #17
Source File: RangerAuthorizationCoprocessor.java    From ranger with Apache License 2.0
@Override
public void getUserPermissions(RpcController controller, AccessControlProtos.GetUserPermissionsRequest request,
		RpcCallback<AccessControlProtos.GetUserPermissionsResponse> done) {
	AccessControlProtos.GetUserPermissionsResponse response = null;
	try {
		String operation = "userPermissions";
		final RangerAccessResourceImpl resource = new RangerAccessResourceImpl();
		User user = getActiveUser(null);
		Set<String> groups = _userUtils.getUserGroups(user);
		if (groups.isEmpty() && user.getUGI() != null) {
			String[] groupArray = user.getUGI().getGroupNames();
			if (groupArray != null) {
				groups = Sets.newHashSet(groupArray);
			}
		}
		RangerAccessRequestImpl rangerAccessrequest = new RangerAccessRequestImpl(resource, null,
				_userUtils.getUserAsString(user), groups, null);
		rangerAccessrequest.setAction(operation);
		rangerAccessrequest.setClientIPAddress(getRemoteAddress());
		rangerAccessrequest.setResourceMatchingScope(RangerAccessRequest.ResourceMatchingScope.SELF);
		List<UserPermission> perms = null;
		if (request.getType() == AccessControlProtos.Permission.Type.Table) {
			final TableName table = request.hasTableName() ? ProtobufUtil.toTableName(request.getTableName()) : null;
			requirePermission(null, operation, table.getName(), Action.ADMIN);
			resource.setValue(RangerHBaseResource.KEY_TABLE, table.getNameAsString());
			perms = User.runAsLoginUser(new PrivilegedExceptionAction<List<UserPermission>>() {
				@Override
				public List<UserPermission> run() throws Exception {
					return getUserPermissions(
							hbasePlugin.getResourceACLs(rangerAccessrequest),
							table.getNameAsString(), false);
				}
			});
		} else if (request.getType() == AccessControlProtos.Permission.Type.Namespace) {
			final String namespace = request.getNamespaceName().toStringUtf8();
			requireGlobalPermission(null, "getUserPermissionForNamespace", namespace, Action.ADMIN);
			resource.setValue(RangerHBaseResource.KEY_TABLE, namespace + RangerHBaseResource.NAMESPACE_SEPARATOR);
			rangerAccessrequest.setRequestData(namespace);
			perms = User.runAsLoginUser(new PrivilegedExceptionAction<List<UserPermission>>() {
				@Override
				public List<UserPermission> run() throws Exception {
					return getUserPermissions(
							hbasePlugin.getResourceACLs(rangerAccessrequest),
							namespace, true);
				}
			});
		} else {
			requirePermission(null, "userPermissions", Action.ADMIN);
			perms = User.runAsLoginUser(new PrivilegedExceptionAction<List<UserPermission>>() {
				@Override
				public List<UserPermission> run() throws Exception {
					return getUserPermissions(
							hbasePlugin.getResourceACLs(rangerAccessrequest), null,
							false);
				}
			});
			if (_userUtils.isSuperUser(user)) {
				perms.add(new UserPermission(Bytes.toBytes(_userUtils.getUserAsString(user)),
						AccessControlLists.ACL_TABLE_NAME, null, Action.values()));
			}
		}
		response = AccessControlUtil.buildGetUserPermissionsResponse(perms);
	} catch (IOException ioe) {
		// pass exception back up
		ResponseConverter.setControllerException(controller, ioe);
	}
	done.run(response);
}
 
Example #18
Source File: ProtoBufConverter.java    From eagle with Apache License 2.0
public static Scan fromPBScan(ClientProtos.Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example #19
Source File: ProtoBufConverter.java    From Eagle with Apache License 2.0
public static ClientProtos.Scan toPBScan(Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example #20
Source File: HBaseMutationCoder.java    From beam with Apache License 2.0
@Override
public Mutation decode(InputStream inStream) throws IOException {
  return ProtobufUtil.toMutation(MutationProto.parseDelimitedFrom(inStream));
}
 
Example #21
Source File: HBaseMutationCoder.java    From beam with Apache License 2.0
@Override
public void encode(Mutation mutation, OutputStream outStream) throws IOException {
  MutationType type = getType(mutation);
  MutationProto proto = ProtobufUtil.toMutation(type, mutation);
  proto.writeDelimitedTo(outStream);
}
 
Example #22
Source File: HBaseResultCoder.java    From beam with Apache License 2.0
@Override
public Result decode(InputStream inputStream) throws IOException {
  return ProtobufUtil.toResult(ClientProtos.Result.parseDelimitedFrom(inputStream));
}
 
Example #23
Source File: HBaseResultCoder.java    From beam with Apache License 2.0
@Override
public void encode(Result value, OutputStream outputStream) throws IOException {
  ProtobufUtil.toResult(value).writeDelimitedTo(outputStream);
}
 
Example #24
Source File: HBaseIO.java    From beam with Apache License 2.0
private void readObject(ObjectInputStream in) throws IOException {
  this.configuration = SerializableCoder.of(SerializableConfiguration.class).decode(in).get();
  this.tableId = StringUtf8Coder.of().decode(in);
  this.scan = ProtobufUtil.toScan(ClientProtos.Scan.parseDelimitedFrom(in));
}
 
Example #25
Source File: HBaseIO.java    From beam with Apache License 2.0
private void writeObject(ObjectOutputStream out) throws IOException {
  SerializableCoder.of(SerializableConfiguration.class)
      .encode(new SerializableConfiguration(this.configuration), out);
  StringUtf8Coder.of().encode(this.tableId, out);
  ProtobufUtil.toScan(this.scan).writeDelimitedTo(out);
}
 
Example #26
Source File: HfileBulkExporter.java    From super-cloudops with Apache License 2.0
/**
 * Setup scan condition if necessary.
 * 
 * @param conf
 * @param line
 * @throws IOException
 */
public static void setScanIfNecessary(Configuration conf, CommandLine line) throws IOException {
	String startRow = line.getOptionValue("startRow");
	String endRow = line.getOptionValue("endRow");
	String startTime = line.getOptionValue("startTime");
	String endTime = line.getOptionValue("endTime");

	boolean enabledScan = false;
	Scan scan = new Scan();
	// Row
	if (isNotBlank(startRow)) {
		conf.set(TableInputFormat.SCAN_ROW_START, startRow);
		scan.setStartRow(Bytes.toBytes(startRow));
		enabledScan = true;
	}
	if (isNotBlank(endRow)) {
		Assert2.hasText(startRow, "startRow must also be set when endRow is specified");
		conf.set(TableInputFormat.SCAN_ROW_STOP, endRow);
		scan.setStopRow(Bytes.toBytes(endRow));
		enabledScan = true;
	}

	// Row TimeStamp
	if (isNotBlank(startTime) && isNotBlank(endTime)) {
		conf.set(TableInputFormat.SCAN_TIMERANGE_START, startTime);
		conf.set(TableInputFormat.SCAN_TIMERANGE_END, endTime);
		try {
			Timestamp stime = new Timestamp(Long.parseLong(startTime));
			Timestamp etime = new Timestamp(Long.parseLong(endTime));
			scan.setTimeRange(stime.getTime(), etime.getTime());
			enabledScan = true;
		} catch (Exception e) {
			throw new IllegalArgumentException(String.format("Illegal startTime(%s) and endTime(%s)", startTime, endTime), e);
		}
	}

	if (enabledScan) {
		ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
		log.info("All other SCAN configuration are ignored if\n"
				+ "		 * this is specified.See TableMapReduceUtil.convertScanToString(Scan)\n"
				+ "		 * for more details.");
		conf.set(TableInputFormat.SCAN, Base64.encodeBytes(proto.toByteArray()));
	}
}
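A hypothetical driver sketch showing how setScanIfNecessary might be wired up: the option names mirror those read in the example above, while the class name, the commons-cli parsing, and the use of HfileBulkExporter from a main method are assumptions for illustration (its import is omitted).

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;

public class ScanSetupDriver {
    public static void main(String[] args) throws Exception {
        // Option names match those read by setScanIfNecessary() above.
        Options options = new Options();
        options.addOption(null, "startRow", true, "scan start row");
        options.addOption(null, "endRow", true, "scan stop row");
        options.addOption(null, "startTime", true, "time range start (epoch millis)");
        options.addOption(null, "endTime", true, "time range end (epoch millis)");
        CommandLine line = new DefaultParser().parse(options, args);

        Configuration conf = HBaseConfiguration.create();
        // Populates TableInputFormat.SCAN (and the related keys) when any bound was given.
        HfileBulkExporter.setScanIfNecessary(conf, line);
        System.out.println("serialized scan: " + conf.get(TableInputFormat.SCAN));
    }
}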
 
Example #27
Source File: ProtoBufConverter.java    From Eagle with Apache License 2.0
/**
 * Converts a protobuf scan back into a client-side {@link Scan}.
 *
 * @param scan the protobuf scan to convert
 * @return the equivalent client-side Scan
 * @throws IOException when the conversion fails
 */
public static Scan fromPBScan(ClientProtos.Scan scan) throws IOException {
    return ProtobufUtil.toScan(scan);
}
 
Example #28
Source File: SpliceTableMapReduceUtil.java    From spliceengine with GNU Affero General Public License v3.0
/**
 * Converts the given Base64 string back into a Scan instance.
 *
 * @param base64  The scan details.
 * @return The newly created Scan instance.
 * @throws IOException When reading the scan instance fails.
 */
public static Scan convertStringToScan(String base64) throws IOException{
    byte[] bytes= Base64.getDecoder().decode(base64);
    ClientProtos.Scan scan=ClientProtos.Scan.parseFrom(bytes);
    return ProtobufUtil.toScan(scan);
}
 
Example #29
Source File: IndexTool.java    From hgraphdb with Apache License 2.0
/**
 * Writes the given scan into a Base64 encoded string.
 *
 * @param scan  The scan to write out.
 * @return The scan saved in a Base64 encoded string.
 * @throws IOException When writing the scan fails.
 */
static String convertScanToString(Scan scan) throws IOException {
    ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    return Base64.encodeBytes(proto.toByteArray());
}