org.apache.hadoop.hbase.protobuf.generated.ClientProtos Java Examples
The following examples show how to use
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.
Each example notes its source file, originating project, and license.
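Nearly every example below performs the same round trip: convert an HBase Scan (or Result, or Mutation) into its ClientProtos message so it can be serialized as bytes, then parse it back on the other side. A minimal sketch of that round trip, assuming an HBase 1.x-style classpath (the ProtobufUtil calls mirror the examples below and are not tied to any one project):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos;

// Scan -> protobuf message -> bytes, e.g. for a job configuration or a wire format.
Scan scan = new Scan();
ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
byte[] bytes = proto.toByteArray();

// Bytes -> protobuf message -> Scan on the receiving side.
Scan restored = ProtobufUtil.toScan(ClientProtos.Scan.parseFrom(bytes));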
Example #1
Source File: KeyValueCodec.java From phoenix with Apache License 2.0
/**
 * Read a single {@link KeyValue} from the input stream - may either be a regular {@link KeyValue}
 * or an {@link IndexedKeyValue}.
 * @param in to read from
 * @return the next {@link KeyValue}, if one is available
 * @throws IOException if the next {@link KeyValue} cannot be read
 */
public static KeyValue readKeyValue(DataInput in) throws IOException {
  int length = in.readInt();
  // it's a special IndexedKeyValue
  if (length == INDEX_TYPE_LENGTH_MARKER) {
    ImmutableBytesPtr indexTableName = new ImmutableBytesPtr(Bytes.readByteArray(in));
    byte[] mutationData = Bytes.readByteArray(in);
    ClientProtos.MutationProto mProto = ClientProtos.MutationProto.parseFrom(mutationData);
    Mutation mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(mProto);
    IndexedKeyValue kv = null;
    if (mutation != null) {
      kv = IndexedKeyValue.newIndexedKeyValue(indexTableName.copyBytesIfNecessary(), mutation);
    } else {
      kv = new IndexedKeyValue();
    }
    return kv;
  } else {
    return KeyValue.create(length, in);
  }
}
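The int length prefix doubles as a type tag here: INDEX_TYPE_LENGTH_MARKER is presumably a sentinel value that no regular KeyValue serialization can produce (such as a negative length), so the reader can branch between a regular KeyValue and an IndexedKeyValue without any extra framing bytes.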
Example #2
Source File: TableScannerBuilderTest.java From spliceengine with GNU Affero General Public License v3.0
@Override
protected DataScan readScan(ObjectInput in) throws IOException {
  byte[] bytes = new byte[in.readInt()];
  in.readFully(bytes);
  ClientProtos.Scan scan = ClientProtos.Scan.parseFrom(bytes);
  return new HScan(ProtobufUtil.toScan(scan));
}
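Only the read side appears in this test; the write side presumably emits the same length-prefixed protobuf bytes. A minimal sketch of that counterpart (the method name writeScan and its signature are assumptions, not spliceengine API):

// Hypothetical inverse of readScan: serialize a Scan as a length-prefixed
// protobuf byte array that in.readInt()/in.readFully() above can consume.
protected void writeScan(ObjectOutput out, Scan scan) throws IOException {
  byte[] bytes = ProtobufUtil.toScan(scan).toByteArray();
  out.writeInt(bytes.length); // length prefix read back by in.readInt()
  out.write(bytes);
}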
Example #3
Source File: HOperationFactory.java From spliceengine with GNU Affero General Public License v3.0
@Override
public DataScan readScan(ObjectInput in) throws IOException {
  byte[] bytes = new byte[in.readInt()];
  in.readFully(bytes);
  ClientProtos.Scan scan = ClientProtos.Scan.parseFrom(bytes);
  return new HScan(ProtobufUtil.toScan(scan));
}
Example #4
Source File: HadoopShim.java From pentaho-hadoop-shims with Apache License 2.0
@Override
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class,
    TableMapper.class, ZooKeeper.class, Channel.class, Message.class,
    Lists.class, Trace.class, MetricsRegistry.class
  };
}
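Dependency-class lists like this are typically handed to HBase's TableMapReduceUtil so that the jar containing each class is shipped with the MapReduce job; ClientProtos is listed to pull in HBase's generated-protobuf classes. A sketch of that usage, assuming the HBase 1.x mapreduce API (the shim and job variables are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

// Ship the jar that contains each listed dependency class with the job.
Configuration conf = job.getConfiguration();
TableMapReduceUtil.addDependencyJars(conf, shim.getHbaseDependencyClasses());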
Example #5
Source File: HadoopShim.java From pentaho-hadoop-shims with Apache License 2.0
@Override
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class,
    TableMapper.class, ZooKeeper.class, Channel.class, Message.class,
    Lists.class, Trace.class, MetricsRegistry.class
  };
}
Example #6
Source File: HadoopShim.java From pentaho-hadoop-shims with Apache License 2.0
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class,
    JobUtil.class, TableMapper.class, FastLongHistogram.class, Snapshot.class,
    ZooKeeper.class, Channel.class, Message.class, Lists.class, Trace.class,
    MetricsRegistry.class
  };
}
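Relative to the other shims, this variant also lists JobUtil, FastLongHistogram, and Snapshot, presumably because the HBase release it targets splits those classes into additional jars that need shipping as well.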
Example #7
Source File: HadoopShim.java From pentaho-hadoop-shims with Apache License 2.0
@Override
public Class[] getHbaseDependencyClasses() {
  return new Class[] {
    HConstants.class, ClientProtos.class, Put.class, CompatibilityFactory.class,
    TableMapper.class, ZooKeeper.class, Channel.class, Message.class,
    Lists.class, Trace.class, MetricsRegistry.class
  };
}
Example #8
Source File: PhoenixInputSplit.java From phoenix with Apache License 2.0
@Override
public void write(DataOutput output) throws IOException {
  WritableUtils.writeString(output, regionLocation);
  WritableUtils.writeVLong(output, regionSize);

  Preconditions.checkNotNull(scans);
  WritableUtils.writeVInt(output, scans.size());
  for (Scan scan : scans) {
    ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
    byte[] protoScanBytes = protoScan.toByteArray();
    WritableUtils.writeVInt(output, protoScanBytes.length);
    output.write(protoScanBytes);
  }
}
Example #9
Source File: PhoenixInputSplit.java From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
  regionLocation = WritableUtils.readString(input);
  regionSize = WritableUtils.readVLong(input);

  int count = WritableUtils.readVInt(input);
  scans = Lists.newArrayListWithExpectedSize(count);
  for (int i = 0; i < count; i++) {
    byte[] protoScanBytes = new byte[WritableUtils.readVInt(input)];
    input.readFully(protoScanBytes);
    ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
    Scan scan = ProtobufUtil.toScan(protoScan);
    scans.add(scan);
  }
  init();
}
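Examples #8 and #9 are the two halves of the Writable contract for this split: write emits the region location, the region size, a scan count, and then each scan as a vint-length-prefixed protobuf, and readFields consumes the same fields in exactly the same order before re-initializing the split.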
Example #10
Source File: PhoenixInputSplit.java From phoenix with Apache License 2.0
@Override
public void write(DataOutput output) throws IOException {
  Preconditions.checkNotNull(scans);
  WritableUtils.writeVInt(output, scans.size());
  for (Scan scan : scans) {
    ClientProtos.Scan protoScan = ProtobufUtil.toScan(scan);
    byte[] protoScanBytes = protoScan.toByteArray();
    WritableUtils.writeVInt(output, protoScanBytes.length);
    output.write(protoScanBytes);
  }
}
Example #11
Source File: PhoenixInputSplit.java From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
  int count = WritableUtils.readVInt(input);
  scans = Lists.newArrayListWithExpectedSize(count);
  for (int i = 0; i < count; i++) {
    byte[] protoScanBytes = new byte[WritableUtils.readVInt(input)];
    input.readFully(protoScanBytes);
    ClientProtos.Scan protoScan = ClientProtos.Scan.parseFrom(protoScanBytes);
    Scan scan = ProtobufUtil.toScan(protoScan);
    scans.add(scan);
  }
  init();
}
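Examples #10 and #11 are the same Writable pair, evidently from another revision of the same Phoenix file, minus the region location and size fields.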
Example #12
Source File: ProtoBufConverter.java From eagle with Apache License 2.0
public static ClientProtos.Scan toPBScan(Scan scan) throws IOException {
  return ProtobufUtil.toScan(scan);
}
Example #13
Source File: ProtoBufConverter.java From eagle with Apache License 2.0
public static Scan fromPBScan(ClientProtos.Scan scan) throws IOException {
  return ProtobufUtil.toScan(scan);
}
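Together with Example #12, this gives both conversion directions; each is a thin wrapper over an overload of ProtobufUtil.toScan. A round trip through the two converters (assuming a Scan named scan is in scope):

// Scan -> ClientProtos.Scan -> Scan via the two converters above.
ClientProtos.Scan proto = ProtoBufConverter.toPBScan(scan);
Scan restored = ProtoBufConverter.fromPBScan(proto);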
Example #14
Source File: ProtoBufConverter.java From Eagle with Apache License 2.0
public static ClientProtos.Scan toPBScan(Scan scan) throws IOException {
  return ProtobufUtil.toScan(scan);
}
Example #15
Source File: HBaseResultCoder.java From beam with Apache License 2.0
@Override
public Result decode(InputStream inputStream) throws IOException {
  return ProtobufUtil.toResult(ClientProtos.Result.parseDelimitedFrom(inputStream));
}
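Only decode is shown; the matching encode presumably converts the Result back to its protobuf form and writes it delimited, mirroring parseDelimitedFrom. A sketch under that assumption (not verified against the Beam source):

@Override
public void encode(Result value, OutputStream outputStream) throws IOException {
  // Delimited write pairs with ClientProtos.Result.parseDelimitedFrom in decode.
  ProtobufUtil.toResult(value).writeDelimitedTo(outputStream);
}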
Example #16
Source File: HBaseIO.java From beam with Apache License 2.0
private void readObject(ObjectInputStream in) throws IOException {
  this.configuration = SerializableCoder.of(SerializableConfiguration.class).decode(in).get();
  this.tableId = StringUtf8Coder.of().decode(in);
  this.scan = ProtobufUtil.toScan(ClientProtos.Scan.parseDelimitedFrom(in));
}
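The corresponding writeObject presumably serializes the same three fields in the same order, wrapping the Configuration for Beam's SerializableCoder. A sketch under that assumption:

private void writeObject(ObjectOutputStream out) throws IOException {
  // Mirror readObject: coder-encoded configuration, then table id, then scan.
  SerializableCoder.of(SerializableConfiguration.class)
      .encode(new SerializableConfiguration(configuration), out);
  StringUtf8Coder.of().encode(tableId, out);
  ProtobufUtil.toScan(scan).writeDelimitedTo(out);
}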
Example #17
Source File: HfileBulkExporter.java From super-cloudops with Apache License 2.0
/**
 * Setup scan condition if necessary.
 *
 * @param conf the job configuration to populate
 * @param line the parsed command line options
 * @throws IOException
 */
public static void setScanIfNecessary(Configuration conf, CommandLine line) throws IOException {
  String startRow = line.getOptionValue("startRow");
  String endRow = line.getOptionValue("endRow");
  String startTime = line.getOptionValue("startTime");
  String endTime = line.getOptionValue("endTime");

  boolean enabledScan = false;
  Scan scan = new Scan();
  // Row
  if (isNotBlank(startRow)) {
    conf.set(TableInputFormat.SCAN_ROW_START, startRow);
    scan.setStartRow(Bytes.toBytes(startRow));
    enabledScan = true;
  }
  if (isNotBlank(endRow)) {
    Assert2.hasText(startRow, "Argument for startRow and endRow are used simultaneously");
    conf.set(TableInputFormat.SCAN_ROW_STOP, endRow);
    scan.setStopRow(Bytes.toBytes(endRow));
    enabledScan = true;
  }

  // Row TimeStamp
  if (isNotBlank(startTime) && isNotBlank(endTime)) {
    conf.set(TableInputFormat.SCAN_TIMERANGE_START, startTime);
    conf.set(TableInputFormat.SCAN_TIMERANGE_END, endTime);
    try {
      Timestamp stime = new Timestamp(Long.parseLong(startTime));
      Timestamp etime = new Timestamp(Long.parseLong(endTime));
      scan.setTimeRange(stime.getTime(), etime.getTime());
      enabledScan = true;
    } catch (Exception e) {
      throw new IllegalArgumentException(
          String.format("Illegal startTime(%s) and endTime(%s)", startTime, endTime), e);
    }
  }

  if (enabledScan) {
    ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
    log.info("All other SCAN configuration are ignored if\n"
        + " * this is specified.See TableMapReduceUtil.convertScanToString(Scan)\n"
        + " * for more details.");
    conf.set(TableInputFormat.SCAN, Base64.encodeBytes(proto.toByteArray()));
  }
}
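The Base64-encoded scan stored under TableInputFormat.SCAN is the standard handoff to HBase's TableInputFormat; Example #20 below shows the inverse decoding step.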
Example #18
Source File: ProtoBufConverter.java From Eagle with Apache License 2.0
/**
 * @param scan the protobuf scan message to convert
 * @return the equivalent client Scan
 */
public static Scan fromPBScan(ClientProtos.Scan scan) throws IOException {
  return ProtobufUtil.toScan(scan);
}
Example #19
Source File: IndexTool.java From hgraphdb with Apache License 2.0
/**
 * Writes the given scan into a Base64 encoded string.
 *
 * @param scan The scan to write out.
 * @return The scan saved in a Base64 encoded string.
 * @throws IOException When writing the scan fails.
 */
static String convertScanToString(Scan scan) throws IOException {
  ClientProtos.Scan proto = ProtobufUtil.toScan(scan);
  return Base64.encodeBytes(proto.toByteArray());
}
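This reproduces the scan-to-string handoff that TableMapReduceUtil uses internally when populating TableInputFormat.SCAN (compare Example #17); Example #20 below is its inverse.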
Example #20
Source File: SpliceTableMapReduceUtil.java From spliceengine with GNU Affero General Public License v3.0
/**
 * Converts the given Base64 string back into a Scan instance.
 *
 * @param base64 The scan details.
 * @return The newly created Scan instance.
 * @throws IOException When reading the scan instance fails.
 */
public static Scan convertStringToScan(String base64) throws IOException {
  byte[] bytes = Base64.getDecoder().decode(base64);
  ClientProtos.Scan scan = ClientProtos.Scan.parseFrom(bytes);
  return ProtobufUtil.toScan(scan);
}
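This is the inverse of Example #19's convertScanToString. Note that the two examples use different Base64 implementations (HBase's org.apache.hadoop.hbase.util.Base64 there, java.util.Base64 here); both use the standard alphabet, but interoperability between them is worth verifying against the specific HBase version in use.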