org.apache.orc.storage.ql.exec.vector.ColumnVector Java Examples

The following examples show how to use org.apache.orc.storage.ql.exec.vector.ColumnVector. All of them are taken from the Apache Iceberg project; the original source file is noted above each example.
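The snippets all follow the same conventions. A writer fills one ColumnVector per column of a VectorizedRowBatch: a null is recorded by setting output.noNulls = false and output.isNull[rowId] = true, and a non-null value clears isNull[rowId] and stores the value in the typed subclass (LongColumnVector, DoubleColumnVector, BytesColumnVector, and so on). The sketch below is a minimal, self-contained illustration of that write-side pattern, assuming the standard ORC writer API (TypeDescription, OrcFile, VectorizedRowBatch) and a hypothetical output file example.orc; note that org.apache.orc.storage.ql.exec.vector is ORC's shaded ("nohive") copy of the Hive storage-api, whose unshaded form lives under org.apache.hadoop.hive.ql.exec.vector.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;
import org.apache.orc.storage.ql.exec.vector.LongColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;

public class ColumnVectorWriteSketch {
  public static void main(String[] args) throws Exception {
    TypeDescription schema = TypeDescription.fromString("struct<x:bigint>");
    Writer writer = OrcFile.createWriter(new Path("example.orc"),
        OrcFile.writerOptions(new Configuration()).setSchema(schema));
    VectorizedRowBatch batch = schema.createRowBatch();
    LongColumnVector x = (LongColumnVector) batch.cols[0];

    Long[] values = {1L, null, 3L};
    for (Long value : values) {
      int row = batch.size++;
      if (value == null) {
        // the same null convention the examples below use
        x.noNulls = false;
        x.isNull[row] = true;
      } else {
        x.isNull[row] = false;
        x.vector[row] = value;
      }
      if (batch.size == batch.getMaxSize()) {
        writer.addRowBatch(batch);
        batch.reset();
      }
    }
    if (batch.size != 0) {
      writer.addRowBatch(batch);
    }
    writer.close();
  }
}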
Example #1
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ArrayData value = data.getArray(column);
    ListColumnVector cv = (ListColumnVector) output;
    // record the length and start of the list elements
    cv.lengths[rowId] = value.numElements();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.child.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      children.addValue((int) (e + cv.offsets[rowId]), e, value, cv.child);
    }
  }
}
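A note on the design: ListColumnVector keeps all rows' elements in a single shared child vector. cv.childCount is a running total that acts as the end offset of everything appended so far, so offsets[rowId] marks where this row's slice begins, lengths[rowId] records how many elements it has, and ensureSize grows the child before the elements are written. The list readers near the end of this page walk the same offsets/lengths pair to decode a row.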
 
Example #2
Source File: GenericOrcWriter.java    From iceberg with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public void addValue(int rowId, List data, ColumnVector output) {
  if (data == null) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    List<Object> value = (List<Object>) data;
    ListColumnVector cv = (ListColumnVector) output;
    // record the length and start of the list elements
    cv.lengths[rowId] = value.size();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.child.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      children.addValue((int) (e + cv.offsets[rowId]), value.get(e), cv.child);
    }
  }
}
 
Example #3
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    MapData map = data.getMap(column);
    ArrayData key = map.keyArray();
    ArrayData value = map.valueArray();
    MapColumnVector cv = (MapColumnVector) output;
    // record the length and start of the map entries
    cv.lengths[rowId] = value.numElements();
    cv.offsets[rowId] = cv.childCount;
    cv.childCount += cv.lengths[rowId];
    // make sure the child is big enough
    cv.keys.ensureSize(cv.childCount, true);
    cv.values.ensureSize(cv.childCount, true);
    // Add each element
    for (int e = 0; e < cv.lengths[rowId]; ++e) {
      int pos = (int) (e + cv.offsets[rowId]);
      keyConverter.addValue(pos, e, key, cv.keys);
      valueConverter.addValue(pos, e, value, cv.values);
    }
  }
}
 
Example #4
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    byte[] value = data.getUTF8String(column).getBytes();
    ((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
  }
}
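BytesColumnVector.setRef stores a reference to the supplied array (with a start offset and length) instead of copying the bytes into the vector's shared buffer, so it avoids a copy but requires that the referenced array not be modified until the batch has been written out; setVal is the copying alternative when that cannot be guaranteed.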
 
Example #5
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DoubleColumnVector) output).vector[rowId] = data.getDouble(column);
  }
}
 
Example #6
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, (int) ((LongColumnVector) vector).vector[row]);
  }
}
 
Example #7
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeArrayWriter writer, int element,
                    ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, (int) ((LongColumnVector) vector).vector[row]);
  }
}
 
Example #8
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, ((LongColumnVector) vector).vector[row]);
  }
}
 
Example #9
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, (float) ((DoubleColumnVector) vector).vector[row]);
  }
}
 
Example #10
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeArrayWriter writer, int element,
                    ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, (float) ((DoubleColumnVector) vector).vector[row]);
  }
}
 
Example #11
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    HiveDecimalWritable v = ((DecimalColumnVector) vector).vector[row];
    writer.write(column,
        new Decimal().set(hack.unscaledLong(v), precision, v.scale()),
        precision, scale);
  }
}
 
Example #12
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeArrayWriter writer, int element,
                    ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, ((DoubleColumnVector) vector).vector[row]);
  }
}
 
Example #13
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    writer.write(column, convert((TimestampColumnVector) vector, row));
  }
}
 
Example #14
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeArrayWriter writer, int element,
                    ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    writer.write(element, (short) ((LongColumnVector) vector).vector[row]);
  }
}
 
Example #15
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DecimalColumnVector) output).vector[rowId].setFromLongAndScale(
        data.getDecimal(column, precision, scale).toUnscaledLong(), scale);
  }
}
 
Example #16
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    TimestampColumnVector cv = (TimestampColumnVector) output;
    long micros = data.getLong(column);
    cv.time[rowId] = micros / 1_000; // millis
    cv.nanos[rowId] = (int) (micros % 1_000_000) * 1_000; // nanos
  }
}
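Spark hands the timestamp over as microseconds since the epoch, while TimestampColumnVector stores each value as milliseconds in time plus nanoseconds-of-second in nanos; for micros = 1_234_567 this yields time = 1_234 and nanos = 234_567_000. Since / and % truncate toward zero, negative (pre-1970) microsecond values would be split incorrectly; a floor-based variant (my sketch, not the project's code) would be:

TimestampColumnVector cv = (TimestampColumnVector) output;
long micros = data.getLong(column);
// floorDiv/floorMod round toward negative infinity, so pre-epoch
// timestamps land in the correct millisecond and nanosecond buckets
cv.time[rowId] = Math.floorDiv(micros, 1_000);
cv.nanos[rowId] = (int) (Math.floorMod(micros, 1_000_000L) * 1_000);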
 
Example #17
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    // getBinary always makes a copy, so we don't need to worry about it
    // being changed behind our back.
    byte[] value = data.getBinary(column);
    ((BytesColumnVector) output).setRef(rowId, value, 0, value.length);
  }
}
 
Example #18
Source File: GenericOrcReaders.java    From iceberg with Apache License 2.0
@Override
public Map<?, ?> nonNullRead(ColumnVector vector, int row) {
  MapColumnVector mapVector = (MapColumnVector) vector;
  int offset = (int) mapVector.offsets[row];
  long length = mapVector.lengths[row];
  Map<Object, Object> map = Maps.newHashMapWithExpectedSize((int) length);
  for (int c = 0; c < length; c++) {
    map.put(
        keyReader.read(mapVector.keys, offset + c),
        valueReader.read(mapVector.values, offset + c));
  }
  return map;
}
 
Example #19
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((DoubleColumnVector) output).vector[rowId] = data.getFloat(column);
  }
}
 
Example #20
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((LongColumnVector) output).vector[rowId] = data.getLong(column);
  }
}
 
Example #21
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((LongColumnVector) output).vector[rowId] = data.getInt(column);
  }
}
 
Example #22
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((LongColumnVector) output).vector[rowId] = data.getShort(column);
  }
}
 
Example #23
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((LongColumnVector) output).vector[rowId] = data.getByte(column);
  }
}
 
Example #24
Source File: SparkOrcWriter.java    From iceberg with Apache License 2.0
@Override
public void addValue(int rowId, int column, SpecializedGetters data,
                     ColumnVector output) {
  if (data.isNullAt(column)) {
    output.noNulls = false;
    output.isNull[rowId] = true;
  } else {
    output.isNull[rowId] = false;
    ((LongColumnVector) output).vector[rowId] = data.getBoolean(column) ? 1 : 0;
  }
}
 
Example #25
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeArrayWriter writer, int element,
                    ColumnVector vector, int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNull(element);
  } else {
    int start = writeList((ListColumnVector) vector, row);
    writer.setOffsetAndSize(element, start, holder.cursor - start);
  }
}
 
Example #26
Source File: SparkOrcReader.java    From iceberg with Apache License 2.0
@Override
public void convert(UnsafeRowWriter writer, int column, ColumnVector vector,
                    int row) {
  if (vector.isRepeating) {
    row = 0;
  }
  if (!vector.noNulls && vector.isNull[row]) {
    writer.setNullAt(column);
  } else {
    BytesColumnVector v = (BytesColumnVector) vector;
    writer.write(column, v.vector[row], v.start[row], v.length[row]);
  }
}
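BytesColumnVector describes each value as a (vector[row], start[row], length[row]) triple pointing into a buffer that may be shared across rows or merely referenced, which is why the reader passes all three to the writer rather than first materializing a standalone byte[].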
 
Example #27
Source File: SparkOrcValueReaders.java    From iceberg with Apache License 2.0
@Override
public ArrayData nonNullRead(ColumnVector vector, int row) {
  ListColumnVector listVector = (ListColumnVector) vector;
  int offset = (int) listVector.offsets[row];
  int length = (int) listVector.lengths[row];
  List<Object> elements = Lists.newArrayListWithExpectedSize(length);
  for (int c = 0; c < length; ++c) {
    elements.add(elementReader.read(listVector.child, offset + c));
  }
  return new GenericArrayData(elements.toArray());
}
 
Example #28
Source File: GenericOrcReaders.java    From iceberg with Apache License 2.0
@Override
public List<?> nonNullRead(ColumnVector vector, int row) {
  ListColumnVector listVector = (ListColumnVector) vector;
  int offset = (int) listVector.offsets[row];
  int length = (int) listVector.lengths[row];
  List<Object> elements = Lists.newArrayListWithExpectedSize(length);
  for (int c = 0; c < length; ++c) {
    elements.add(elementReader.read(listVector.child, offset + c));
  }
  return elements;
}
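 
For completeness, the read side that feeds converters like these pulls VectorizedRowBatch instances from a RecordReader and applies the same isRepeating / noNulls / isNull checks shown above. A minimal sketch, assuming the standard ORC reader API and the hypothetical example.orc from the first sketch:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;
import org.apache.orc.storage.ql.exec.vector.LongColumnVector;
import org.apache.orc.storage.ql.exec.vector.VectorizedRowBatch;

public class ColumnVectorReadSketch {
  public static void main(String[] args) throws Exception {
    Reader reader = OrcFile.createReader(new Path("example.orc"),
        OrcFile.readerOptions(new Configuration()));
    VectorizedRowBatch batch = reader.getSchema().createRowBatch();
    RecordReader rows = reader.rows();
    while (rows.nextBatch(batch)) {
      LongColumnVector x = (LongColumnVector) batch.cols[0];
      for (int r = 0; r < batch.size; r++) {
        // mirror the readers above: honor isRepeating and the null flags
        int row = x.isRepeating ? 0 : r;
        if (!x.noNulls && x.isNull[row]) {
          System.out.println("null");
        } else {
          System.out.println(x.vector[row]);
        }
      }
    }
    rows.close();
  }
}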