Java Code Examples for org.apache.hadoop.hbase.util.Bytes#readByteArray()

The following examples show how to use org.apache.hadoop.hbase.util.Bytes#readByteArray(). Each example is taken from an open-source project; the source file, project, and license are listed above it.
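
Before the examples, here is a minimal round-trip sketch showing what the call pairs with: Bytes.readByteArray(DataInput) reads a vint length prefix followed by that many bytes, the format produced by Bytes.writeByteArray(DataOutput, byte[]). The class name ReadByteArrayRoundTrip is illustrative only and is not taken from any of the projects below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.hbase.util.Bytes;

public class ReadByteArrayRoundTrip {
    public static void main(String[] args) throws IOException {
        // Write a length-prefixed byte array: a vint length followed by the raw bytes.
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(buffer)) {
            Bytes.writeByteArray(out, Bytes.toBytes("example-value"));
        }
        // Read it back with the complementary call used throughout the examples below.
        try (DataInputStream in = new DataInputStream(
                new ByteArrayInputStream(buffer.toByteArray()))) {
            byte[] value = Bytes.readByteArray(in);
            System.out.println(Bytes.toString(value)); // prints "example-value"
        }
    }
}
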
Example 1
Source File: InListExpression.java    From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    input.readBoolean(); // Unused, but left for b/w compat. TODO: remove in next major release
    fixedWidth = WritableUtils.readVInt(input);
    byte[] valuesBytes = Bytes.readByteArray(input);
    valuesByteLength = valuesBytes.length;
    int len = fixedWidth == -1 ? WritableUtils.readVInt(input) : valuesByteLength / fixedWidth;
    // TODO: consider using a regular HashSet as we never serialize from the server-side
    values = Sets.newLinkedHashSetWithExpectedSize(len);
    int offset = 0;
    int i  = 0;
    if (i < len) {
        offset = readValue(input, valuesBytes, offset, minValue = new ImmutableBytesPtr());
        while (++i < len-1) {
            offset = readValue(input, valuesBytes, offset, new ImmutableBytesPtr());
        }
        if (i < len) {
            offset = readValue(input, valuesBytes, offset, maxValue = new ImmutableBytesPtr());
        } else {
            maxValue = minValue;
        }
    } else {
        minValue = maxValue = new ImmutableBytesPtr(ByteUtil.EMPTY_BYTE_ARRAY);
    }
}
 
Example 2
Source File: PColumnImpl.java    From phoenix with BSD 3-Clause "New" or "Revised" License
@Override
public void readFields(DataInput input) throws IOException {
    byte[] columnNameBytes = Bytes.readByteArray(input);
    PName columnName = PNameFactory.newName(columnNameBytes);
    byte[] familyNameBytes = Bytes.readByteArray(input);
    PName familyName = familyNameBytes.length == 0 ? null : PNameFactory.newName(familyNameBytes);
    // TODO: optimize the reading/writing of this b/c it could likely all fit in a single byte or two
    PDataType dataType = PDataType.values()[WritableUtils.readVInt(input)];
    int maxLength = WritableUtils.readVInt(input);
    int scale = WritableUtils.readVInt(input);
    boolean nullable = input.readBoolean();
    int position = WritableUtils.readVInt(input);
    ColumnModifier columnModifier = ColumnModifier.fromSystemValue(WritableUtils.readVInt(input));
    init(columnName, familyName, dataType, maxLength == NO_MAXLENGTH ? null : maxLength,
            scale == NO_SCALE ? null : scale, nullable, position, columnModifier);
}
 
Example 3
Source File: InListExpression.java    From phoenix with BSD 3-Clause "New" or "Revised" License
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    containsNull = input.readBoolean();
    fixedWidth = WritableUtils.readVInt(input);
    byte[] valuesBytes = Bytes.readByteArray(input);
    valuesByteLength = valuesBytes.length;
    int len = fixedWidth == -1 ? WritableUtils.readVInt(input) : valuesByteLength / fixedWidth;
    values = Sets.newLinkedHashSetWithExpectedSize(len);
    int offset = 0;
    int i  = 0;
    if (i < len) {
        offset = readValue(input, valuesBytes, offset, minValue = new ImmutableBytesPtr());
        while (++i < len-1) {
            offset = readValue(input, valuesBytes, offset, new ImmutableBytesPtr());
        }
        if (i < len) {
            offset = readValue(input, valuesBytes, offset, maxValue = new ImmutableBytesPtr());
        } else {
            maxValue = minValue;
        }
    } else {
        minValue = maxValue = new ImmutableBytesPtr(ByteUtil.EMPTY_BYTE_ARRAY);
    }
}
 
Example 4
Source File: IndexSpecification.java    From hbase-secondary-index with GNU General Public License v3.0
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
  indexId = in.readUTF();
  int numIndexedCols = in.readInt();
  indexedColumns = new byte[numIndexedCols][];
  for (int i = 0; i < numIndexedCols; i++) {
    indexedColumns[i] = Bytes.readByteArray(in);
  }
  int numAdditionalCols = in.readInt();
  additionalColumns = new byte[numAdditionalCols][];
  for (int i = 0; i < numAdditionalCols; i++) {
    additionalColumns[i] = Bytes.readByteArray(in);
  }
  makeAllColumns();
  keyGenerator = (IndexKeyGenerator) ObjectWritable.readObject(in, CONF);
  
  // FIXME this is to read the deprecated comparator, in existing data
  ObjectWritable.readObject(in, CONF);
}
 
Example 5
Source File: KeyValueCodec.java    From phoenix with Apache License 2.0
/**
 * Read a single {@link KeyValue} from the input stream - may either be a regular {@link KeyValue}
 * or an {@link IndexedKeyValue}.
 * @param in to read from
 * @return the next {@link KeyValue}, if one is available
 * @throws IOException if the next {@link KeyValue} cannot be read
 */
public static KeyValue readKeyValue(DataInput in) throws IOException {
  int length = in.readInt();
  // it's a special IndexedKeyValue
  if (length == INDEX_TYPE_LENGTH_MARKER) {
    ImmutableBytesPtr indexTableName = new ImmutableBytesPtr(Bytes.readByteArray(in));
    byte[] mutationData = Bytes.readByteArray(in);
    ClientProtos.MutationProto mProto = ClientProtos.MutationProto.parseFrom(mutationData);
    Mutation mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(mProto);
    IndexedKeyValue kv = null;
    if (mutation != null){
      kv = IndexedKeyValue.newIndexedKeyValue(indexTableName.copyBytesIfNecessary(), mutation);
    } else {
      kv = new IndexedKeyValue();
    }
    return kv;
  } else {
    return KeyValue.create(length, in);
  }
}
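
The read path above implies a simple write-side layout. The following sketch mirrors it, assuming the same class context (INDEX_TYPE_LENGTH_MARKER is the constant checked above); the method name is hypothetical and this is not the actual KeyValueCodec write code.

static void writeIndexedKeyValueSketch(DataOutput out, byte[] indexTableName,
    byte[] mutationProtoBytes) throws IOException {
  // The marker value written where a length would normally go tells readKeyValue()
  // that an IndexedKeyValue follows rather than a regular KeyValue.
  out.writeInt(INDEX_TYPE_LENGTH_MARKER);
  Bytes.writeByteArray(out, indexTableName);
  Bytes.writeByteArray(out, mutationProtoBytes);
}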
 
Example 6
Source File: HFileBlockIndex.java    From hbase with Apache License 2.0
/**
 * Read in the root-level index from the given input stream. Must match
 * what was written into the root level by
 * {@link BlockIndexWriter#writeIndexBlocks(FSDataOutputStream)} at the
 * offset that function returned.
 *
 * @param in the buffered input stream or wrapped byte input stream
 * @param numEntries the number of root-level index entries
 * @throws IOException
 */
public void readRootIndex(DataInput in, final int numEntries) throws IOException {
  blockOffsets = new long[numEntries];
  initialize(numEntries);
  blockDataSizes = new int[numEntries];

  // If index size is zero, no index was written.
  if (numEntries > 0) {
    for (int i = 0; i < numEntries; ++i) {
      long offset = in.readLong();
      int dataSize = in.readInt();
      byte[] key = Bytes.readByteArray(in);
      add(key, offset, dataSize);
    }
  }
}
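
For reference, the entry layout the reader expects can be inferred from the loop above: one long offset, one int on-disk size, and one vint-length-prefixed key per root-level entry. The sketch below only illustrates that layout; it is not the actual BlockIndexWriter#writeIndexBlocks code, and the method name is hypothetical.

private void writeRootIndexEntriesSketch(DataOutput out, long[] blockOffsets,
    int[] blockDataSizes, byte[][] blockKeys) throws IOException {
  for (int i = 0; i < blockOffsets.length; i++) {
    out.writeLong(blockOffsets[i]);            // block offset
    out.writeInt(blockDataSizes[i]);           // on-disk block size
    Bytes.writeByteArray(out, blockKeys[i]);   // vint length + first key of the block
  }
}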
 
Example 7
Source File: HFileInfo.java    From hbase with Apache License 2.0
/**
 * Now parse the old Writable format.  It was a list of Map entries.  Each map entry was a
 * key and a value of a byte [].  The old map format had a byte before each entry that held
 * a code which was short for the key or value type. We know it was a byte [] in an hfile,
 * so below we just read that byte and discard it.
 */
void parseWritable(final DataInputStream in) throws IOException {
  // First clear the map.
  // Otherwise we will just accumulate entries every time this method is called.
  this.map.clear();
  // Read the number of entries in the map
  int entries = in.readInt();
  // Then read each key/value pair
  for (int i = 0; i < entries; i++) {
    byte [] key = Bytes.readByteArray(in);
    // We used to read a byte that encoded the class type.
    // Read and ignore it because it is always byte [] in hfile
    in.readByte();
    byte [] value = Bytes.readByteArray(in);
    this.map.put(key, value);
  }
}
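
The legacy writer side implied by parseWritable would look roughly like the sketch below: an entry count, then for each entry a key, a one-byte type code, and a value. The method name and the typeCode parameter are illustrative; this is not the actual legacy HFile code.

void writeLegacyFileInfoSketch(DataOutput out, Map<byte[], byte[]> map, byte typeCode)
    throws IOException {
  out.writeInt(map.size());
  for (Map.Entry<byte[], byte[]> entry : map.entrySet()) {
    Bytes.writeByteArray(out, entry.getKey());
    // parseWritable() reads this byte and ignores it because the value is always a byte [].
    out.writeByte(typeCode);
    Bytes.writeByteArray(out, entry.getValue());
  }
}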
 
Example 8
Source File: TableSplit.java    From hbase with Apache License 2.0
/**
 * Reads the values of each field.
 *
 * @param in  The input to read from.
 * @throws IOException When reading the input fails.
 */
@Override
public void readFields(DataInput in) throws IOException {
  Version version = Version.UNVERSIONED;
  // TableSplit was not versioned in the beginning.
  // In order to introduce it now, we make use of the fact
  // that tableName was written with Bytes.writeByteArray,
  // which encodes the array length as a vint which is >= 0.
  // Hence if the vint is >= 0 we have an old version and the vint
  // encodes the length of tableName.
  // If < 0 we just read the version and the next vint is the length.
  // @see Bytes#readByteArray(DataInput)
  int len = WritableUtils.readVInt(in);
  if (len < 0) {
    // what we just read was the version
    version = Version.fromCode(len);
    len = WritableUtils.readVInt(in);
  }
  byte[] tableNameBytes = new byte[len];
  in.readFully(tableNameBytes);
  tableName = TableName.valueOf(tableNameBytes);
  startRow = Bytes.readByteArray(in);
  endRow = Bytes.readByteArray(in);
  regionLocation = Bytes.toString(Bytes.readByteArray(in));
  if (version.atLeast(Version.INITIAL)) {
    scan = Bytes.toString(Bytes.readByteArray(in));
  }
  length = WritableUtils.readVLong(in);
  if (version.atLeast(Version.WITH_ENCODED_REGION_NAME)) {
    encodedRegionName = Bytes.toString(Bytes.readByteArray(in));
  }
}
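
The versioning trick described in the comments relies on the write side emitting a negative vint before the table name, while the unversioned format started directly with Bytes.writeByteArray (whose vint length prefix is always >= 0). A hedged sketch of that header, with a hypothetical method name and versionCode parameter:

void writeVersionedHeaderSketch(DataOutput out, int versionCode, byte[] tableNameBytes)
    throws IOException {
  WritableUtils.writeVInt(out, versionCode); // must be negative to mark a versioned split
  Bytes.writeByteArray(out, tableNameBytes); // vint length (>= 0), then the bytes
}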
 
Example 9
Source File: TablePermission.java    From hbase with Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  byte[] tableBytes = Bytes.readByteArray(in);
  if(tableBytes.length > 0) {
    table = TableName.valueOf(tableBytes);
  }
  if (in.readBoolean()) {
    family = Bytes.readByteArray(in);
  }
  if (in.readBoolean()) {
    qualifier = Bytes.readByteArray(in);
  }
}
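
The readBoolean() guards above correspond to a simple optional-field convention on the write side: an absent table is written as an empty array, and family and qualifier each get a presence flag before their bytes. The sketch below only illustrates that convention; it is not the actual TablePermission code, and the method name is hypothetical.

void writeOptionalFieldsSketch(DataOutput out, byte[] table, byte[] family, byte[] qualifier)
    throws IOException {
  Bytes.writeByteArray(out, table == null ? new byte[0] : table);
  out.writeBoolean(family != null);
  if (family != null) {
    Bytes.writeByteArray(out, family);
  }
  out.writeBoolean(qualifier != null);
  if (qualifier != null) {
    Bytes.writeByteArray(out, qualifier);
  }
}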
 
Example 10
Source File: CompoundBloomFilter.java    From hbase with Apache License 2.0
/**
 * De-serialization for compound Bloom filter metadata. Must be consistent
 * with what {@link CompoundBloomFilterWriter} does.
 *
 * @param meta serialized Bloom filter metadata without any magic blocks
 * @throws IOException
 */
public CompoundBloomFilter(DataInput meta, HFile.Reader reader)
    throws IOException {
  this.reader = reader;

  totalByteSize = meta.readLong();
  hashCount = meta.readInt();
  hashType = meta.readInt();
  totalKeyCount = meta.readLong();
  totalMaxKeys = meta.readLong();
  numChunks = meta.readInt();
  byte[] comparatorClassName = Bytes.readByteArray(meta);
  // The writer would have written 0 as the vint length for Bytes.BYTES_RAWCOMPARATOR.
  // In that case do not initialize the comparator; it is left null.
  if (comparatorClassName.length != 0) {
    comparator = FixedFileTrailer.createComparator(Bytes.toString(comparatorClassName));
  }

  hash = Hash.getInstance(hashType);
  if (hash == null) {
    throw new IllegalArgumentException("Invalid hash type: " + hashType);
  }
  // We will pass null for ROW block
  if(comparator == null) {
    index = new HFileBlockIndex.ByteArrayKeyBlockIndexReader(1);
  } else {
    index = new HFileBlockIndex.CellBasedKeyBlockIndexReader(comparator, 1);
  }
  index.readRootIndex(meta, numChunks);
}
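
The metadata layout the constructor expects can be read off the field order above. The sketch below only mirrors that order and the empty-name convention for Bytes.BYTES_RAWCOMPARATOR; CompoundBloomFilterWriter is the real writer, and this method name is hypothetical.

static void writeBloomMetaSketch(DataOutput meta, long totalByteSize, int hashCount,
    int hashType, long totalKeyCount, long totalMaxKeys, int numChunks,
    String comparatorClassName) throws IOException {
  meta.writeLong(totalByteSize);
  meta.writeInt(hashCount);
  meta.writeInt(hashType);
  meta.writeLong(totalKeyCount);
  meta.writeLong(totalMaxKeys);
  meta.writeInt(numChunks);
  // An empty name stands for Bytes.BYTES_RAWCOMPARATOR; readByteArray() then returns a
  // zero-length array and the reader leaves the comparator null.
  Bytes.writeByteArray(meta,
      Bytes.toBytes(comparatorClassName == null ? "" : comparatorClassName));
}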
 
Example 11
Source File: InListExpression.java    From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    input.readBoolean(); // Unused, but left for b/w compat. TODO: remove in next major release
    fixedWidth = WritableUtils.readVInt(input);
    byte[] valuesBytes = Bytes.readByteArray(input);
    valuesByteLength = valuesBytes.length;
    int len = fixedWidth == -1 ? WritableUtils.readVInt(input) : valuesByteLength / fixedWidth;
    // TODO: consider using a regular HashSet as we never serialize from the server-side
    values = Sets.newLinkedHashSetWithExpectedSize(len);
    hashCodeSet = false;
    int offset = 0;
    int i  = 0;
    if (i < len) {
        offset = readValue(input, valuesBytes, offset, minValue = new ImmutableBytesPtr());
        while (++i < len-1) {
            offset = readValue(input, valuesBytes, offset, new ImmutableBytesPtr());
        }
        if (i < len) {
            offset = readValue(input, valuesBytes, offset, maxValue = new ImmutableBytesPtr());
        } else {
            maxValue = minValue;
        }
    } else {
        minValue = maxValue = new ImmutableBytesPtr(ByteUtil.EMPTY_BYTE_ARRAY);
    }
}
 
Example 12
Source File: MultiKeyValueComparisonFilter.java    From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    try {
        allCFs = input.readBoolean();
        if (!allCFs) {
            essentialCF = Bytes.readByteArray(input);
        }
    } catch (EOFException e) { // Ignore as this will occur when a 4.10 client is used
    }
    init();
}
 
Example 13
Source File: HBaseTableSplitBase.java    From SpyGlass with Apache License 2.0
@Override
public void readFields(DataInput in) throws IOException {
    LOG.debug("READ ME : " + in.toString());

    this.m_tableName = Bytes.readByteArray(in);
    this.m_regionLocation = Bytes.toString(Bytes.readByteArray(in));
    this.m_regionName = Bytes.toString(Bytes.readByteArray(in));
    this.m_sourceMode = HBaseConstants.SourceMode.valueOf(Bytes.toString(Bytes
            .readByteArray(in)));
    this.m_useSalt = Bytes.toBoolean(Bytes.readByteArray(in));

    switch (this.m_sourceMode) {
        case SCAN_RANGE:
            this.m_startRow = Bytes.readByteArray(in);
            this.m_endRow = Bytes.readByteArray(in);
            this.m_endRowInclusive = Bytes.toBoolean(Bytes.readByteArray(in));
            break;

        case GET_LIST:
            this.m_versions = Bytes.toInt(Bytes.readByteArray(in));
            this.m_keyList = new TreeSet<String>();

            int m = Bytes.toInt(Bytes.readByteArray(in));

            for (int i = 0; i < m; i++) {
                this.m_keyList.add(Bytes.toString(Bytes.readByteArray(in)));
            }
            break;
    }

    this.m_timestamp = Bytes.toLong(Bytes.readByteArray(in));

    LOG.debug("READ and CREATED : " + this);
}
 
Example 14
Source File: ProjectedColumnExpression.java    From phoenix with BSD 3-Clause "New" or "Revised" License
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    schema = new KeyValueSchema();
    schema.readFields(input);
    bitSet = ValueBitSet.newInstance(schema);
    position = input.readInt();
    name = Bytes.readByteArray(input);
}
 
Example 15
Source File: IndexedKeyValue.java    From phoenix with Apache License 2.0
/**
 * This method shouldn't be used - you should use {@link KeyValueCodec#readKeyValue(DataInput)} instead. It's the
 * complement to {@link #writeData(DataOutput)}.
 */
@SuppressWarnings("javadoc")
public void readFields(DataInput in) throws IOException {
    this.indexTableName = new ImmutableBytesPtr(Bytes.readByteArray(in));
    byte[] mutationData = Bytes.readByteArray(in);
    MutationProto mProto = MutationProto.parseFrom(mutationData);
    this.mutation = org.apache.hadoop.hbase.protobuf.ProtobufUtil.toMutation(mProto);
    this.hashCode = calcHashCode(indexTableName, mutation);
}
 
Example 16
Source File: TableSplit.java    From hbase with Apache License 2.0
public void readFields(DataInput in) throws IOException {
  this.m_tableName = TableName.valueOf(Bytes.readByteArray(in));
  this.m_startRow = Bytes.readByteArray(in);
  this.m_endRow = Bytes.readByteArray(in);
  this.m_regionLocation = Bytes.toString(Bytes.readByteArray(in));
}
 
Example 17
Source File: KeyValueColumnExpression.java    From phoenix with Apache License 2.0
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    cf = Bytes.readByteArray(input);
    cq = Bytes.readByteArray(input);
}
 
Example 18
Source File: KeyValueColumnExpression.java    From phoenix with BSD 3-Clause "New" or "Revised" License
@Override
public void readFields(DataInput input) throws IOException {
    super.readFields(input);
    cf = Bytes.readByteArray(input);
    cq = Bytes.readByteArray(input);
}
 
Example 19
Source File: SimpleIndexKeyGenerator.java    From hbase-secondary-index with GNU General Public License v3.0
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
  column = Bytes.readByteArray(in);
}