org.apache.orc.storage.ql.exec.vector.BytesColumnVector Java Examples

The following examples show how to use org.apache.orc.storage.ql.exec.vector.BytesColumnVector. They are drawn from the open-source Apache Flink and Apache Iceberg projects; each example names its source file, originating project, and license.
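Before the individual examples, a quick orientation: BytesColumnVector is the variable-length (string/binary) column type of an ORC VectorizedRowBatch. Values are attached either by reference (setRef) or by copy (setVal/fill), nulls are flagged through noNulls/isNull, and reads go through the parallel vector/start/length arrays. The sketch below is illustrative rather than taken from any of the projects listed here; it assumes the ORC "nohive" orc-core artifact (which relocates the Hive vector classes to org.apache.orc.storage) is on the classpath, and the class name BytesColumnVectorSketch is made up for the example.

import java.nio.charset.StandardCharsets;

import org.apache.orc.TypeDescription;
import org.apache.orc.storage.ql.exec.vector.BytesColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;

public class BytesColumnVectorSketch {
  public static void main(String[] args) {
    // One string column; createRowBatch() allocates a BytesColumnVector for it.
    TypeDescription schema = TypeDescription.fromString("struct<name:string>");
    VectorizedRowBatch batch = schema.createRowBatch();
    BytesColumnVector names = (BytesColumnVector) batch.cols[0];

    // Allocate the vector's own buffer before any copying writes (setVal).
    names.initBuffer();

    // Row 0: reference the caller's byte[] directly (no copy).
    byte[] alice = "alice".getBytes(StandardCharsets.UTF_8);
    names.setRef(0, alice, 0, alice.length);

    // Row 1: copy the bytes into the vector's buffer.
    names.setVal(1, "bob".getBytes(StandardCharsets.UTF_8));

    // Row 2: a null entry; noNulls must be cleared once any null is present.
    names.noNulls = false;
    names.isNull[2] = true;

    batch.size = 3;

    // Reading back goes through the parallel vector/start/length arrays.
    for (int row = 0; row < batch.size; row++) {
      if (!names.noNulls && names.isNull[row]) {
        System.out.println("row " + row + ": null");
      } else {
        System.out.println("row " + row + ": " + new String(
            names.vector[row], names.start[row], names.length[row], StandardCharsets.UTF_8));
      }
    }
  }
}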
Example #1
Source File: AbstractOrcNoHiveVector.java    From flink with Apache License 2.0
private static BytesColumnVector createBytesVector(int batchSize, Object value) {
	BytesColumnVector bcv = new BytesColumnVector(batchSize);
	if (value == null) {
		// A repeating null: row 0 stands in for every row in the batch.
		bcv.noNulls = false;
		bcv.isNull[0] = true;
		bcv.isRepeating = true;
	} else {
		byte[] bytes = value instanceof byte[] ?
			(byte[]) value :
			value.toString().getBytes(StandardCharsets.UTF_8);
		// Allocate the vector's buffer, then mark it repeating so the value at row 0 applies to every row.
		bcv.initBuffer(bytes.length);
		bcv.fill(bytes);
		bcv.isNull[0] = false;
	}
	return bcv;
}
 
Example #2
Source File: GenericOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, String data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    // String.getBytes() returns a fresh array, so setRef() can reference it without copying.
    byte[] value = data.getBytes(StandardCharsets.UTF_8);
    ((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
  }
}
 
Example #3
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    // getBinary always makes a copy, so we don't need to worry about it
    // being changed behind our back.
    byte[] value = data.getBinary(column);
    ((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
  }
}
 
Example #4
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    byte[] value = data.getUTF8String(column).getBytes();
    ((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
  }
}
 
Example #5
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeArrayWriter writer, int element,
                    ColumnVector vector, int row) {
  if (vector.isRepeating) {
    // A repeating vector stores its single value (and null flag) at row 0.
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    BytesColumnVector v = (BytesColumnVector) vector;
    // 'holder' and write(...) are members of the enclosing reader class.
    write(holder, writer, element, v.vector[row], v.start[row],
        v.length[row]);
  }
}
 
Example #6
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    BytesColumnVector v = (BytesColumnVector) vector;
    writer.write(column, v.vector[row], v.start[row], v.length[row]);
  }
}
 
Example #7
Source File: GenericOrcReaders.java    From iceberg with Apache License 2.0
@Override
public UUID nonNullRead(ColumnVector vector, int row) {
  BytesColumnVector bytesVector = (BytesColumnVector) vector;
  // wrap() positions the buffer at start[row], so the two getLong() calls read this row's 16-byte slice.
  ByteBuffer buf = ByteBuffer.wrap(bytesVector.vector[row], bytesVector.start[row], bytesVector.length[row]);
  long mostSigBits = buf.getLong();
  long leastSigBits = buf.getLong();
  return new UUID(mostSigBits, leastSigBits);
}
 
Example #8
Source File: GenericOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, byte[] data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((BytesColumnVector) output).setRef(rowId, data, 0, data.length);
  }
}
 
Example #9
Source File: GenericOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, UUID data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    // Serialize the UUID as 16 big-endian bytes, most significant half first.
    ByteBuffer buffer = ByteBuffer.allocate(16);
    buffer.putLong(data.getMostSignificantBits());
    buffer.putLong(data.getLeastSignificantBits());
    ((BytesColumnVector) output).setRef(rowId, buffer.array(), 0, buffer.array().length);
  }
}
 
Example #10
Source File: GenericOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, ByteBuffer data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    // References the buffer's entire backing array; assumes a heap buffer with a zero array offset.
    ((BytesColumnVector) output).setRef(rowId, data.array(), 0, data.array().length);
  }
}
 
Example #11
Source File: OrcValueReaders.java    From iceberg with Apache License 2.0
@Override
public byte[] nonNullRead(ColumnVector vector, int row) {
  BytesColumnVector bytesVector = (BytesColumnVector) vector;

  // Copy this row's slice out of the vector's shared buffer so the caller owns the bytes.
  return Arrays.copyOfRange(
      bytesVector.vector[row], bytesVector.start[row], bytesVector.start[row] + bytesVector.length[row]);
}
 
Example #12
Source File: SparkOrcValueReaders.java    From iceberg with Apache License 2.0
@Override
public UTF8String nonNullRead(ColumnVector vector, int row) {
  BytesColumnVector bytesVector = (BytesColumnVector) vector;
  return UTF8String.fromBytes(bytesVector.vector[row], bytesVector.start[row], bytesVector.length[row]);
}
 
Example #13
Source File: GenericOrcReaders.java    From iceberg with Apache License 2.0
@Override
public ByteBuffer nonNullRead(ColumnVector vector, int row) {
  BytesColumnVector bytesVector = (BytesColumnVector) vector;
  // Wraps the vector's shared buffer without copying; the result is only valid until the batch is reused.
  return ByteBuffer.wrap(bytesVector.vector[row], bytesVector.start[row], bytesVector.length[row]);
}
 
Example #14
Source File: GenericOrcReaders.java    From iceberg with Apache License 2.0
@Override
public String nonNullRead(ColumnVector vector, int row) {
  BytesColumnVector bytesVector = (BytesColumnVector) vector;
  return new String(bytesVector.vector[row], bytesVector.start[row], bytesVector.length[row],
                    StandardCharsets.UTF_8);
}
 
Example #15
Source File: OrcNoHiveBytesVector.java    From flink with Apache License 2.0
public OrcNoHiveBytesVector(BytesColumnVector vector) {
	super(vector);
	this.vector = vector;
}